diff --git a/openpype/client/entities.py b/openpype/client/entities.py
index f1f1d30214..3d2730a17c 100644
--- a/openpype/client/entities.py
+++ b/openpype/client/entities.py
@@ -1455,7 +1455,7 @@ def get_workfile_info(
"""
## Custom data storage:
- Settings - OP settings overrides and local settings
-- Logging - logs from PypeLogger
+- Logging - logs from Logger
- Webpublisher - jobs
- Ftrack - events
- Maya - Shaders
diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py
index dc16aaeac5..8cdf9c407e 100644
--- a/openpype/hosts/aftereffects/api/lib.py
+++ b/openpype/hosts/aftereffects/api/lib.py
@@ -5,11 +5,12 @@ import json
import contextlib
import traceback
import logging
+from functools import partial
from Qt import QtWidgets
from openpype.pipeline import install_host
-from openpype.lib.remote_publish import headless_publish
+from openpype.modules import ModulesManager
from openpype.tools.utils import host_tools
from .launch_logic import ProcessLauncher, get_stub
@@ -37,10 +38,18 @@ def main(*subprocess_args):
launcher.start()
if os.environ.get("HEADLESS_PUBLISH"):
- launcher.execute_in_main_thread(lambda: headless_publish(
- log,
- "CloseAE",
- os.environ.get("IS_TEST")))
+ manager = ModulesManager()
+ webpublisher_addon = manager["webpublisher"]
+
+ launcher.execute_in_main_thread(
+ partial(
+ webpublisher_addon.headless_publish,
+ log,
+ "CloseAE",
+ os.environ.get("IS_TEST")
+ )
+ )
+
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
save = False
if os.getenv("WORKFILES_SAVE_AS"):
diff --git a/openpype/hosts/blender/api/lib.py b/openpype/hosts/blender/api/lib.py
index 20098c0fe8..9cd1ace821 100644
--- a/openpype/hosts/blender/api/lib.py
+++ b/openpype/hosts/blender/api/lib.py
@@ -234,7 +234,7 @@ def lsattrs(attrs: Dict) -> List:
def read(node: bpy.types.bpy_struct_meta_idprop):
"""Return user-defined attributes from `node`"""
- data = dict(node.get(pipeline.AVALON_PROPERTY))
+ data = dict(node.get(pipeline.AVALON_PROPERTY, {}))
# Ignore hidden/internal data
data = {
diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py
index 4f8410da74..e0e09277df 100644
--- a/openpype/hosts/blender/api/ops.py
+++ b/openpype/hosts/blender/api/ops.py
@@ -26,7 +26,7 @@ PREVIEW_COLLECTIONS: Dict = dict()
# This seems like a good value to keep the Qt app responsive and doesn't slow
# down Blender. At least on macOS I the interace of Blender gets very laggy if
# you make it smaller.
-TIMER_INTERVAL: float = 0.01
+TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1
class BlenderApplication(QtWidgets.QApplication):
@@ -164,6 +164,12 @@ def _process_app_events() -> Optional[float]:
dialog.setDetailedText(detail)
dialog.exec_()
+ # Refresh Manager
+ if GlobalClass.app:
+ manager = GlobalClass.app.get_window("WM_OT_avalon_manager")
+ if manager:
+ manager.refresh()
+
if not GlobalClass.is_windows:
if OpenFileCacher.opening_file:
return TIMER_INTERVAL
@@ -192,10 +198,11 @@ class LaunchQtApp(bpy.types.Operator):
self._app = BlenderApplication.get_app()
GlobalClass.app = self._app
- bpy.app.timers.register(
- _process_app_events,
- persistent=True
- )
+ if not bpy.app.timers.is_registered(_process_app_events):
+ bpy.app.timers.register(
+ _process_app_events,
+ persistent=True
+ )
def execute(self, context):
"""Execute the operator.
diff --git a/openpype/hosts/blender/blender_addon/startup/init.py b/openpype/hosts/blender/blender_addon/startup/init.py
index 13a4b8a7a1..8dbff8a91d 100644
--- a/openpype/hosts/blender/blender_addon/startup/init.py
+++ b/openpype/hosts/blender/blender_addon/startup/init.py
@@ -1,4 +1,10 @@
from openpype.pipeline import install_host
from openpype.hosts.blender import api
-install_host(api)
+
+def register():
+ install_host(api)
+
+
+def unregister():
+ pass
diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py
index 8c7b3a2e74..eb91def090 100644
--- a/openpype/hosts/celaction/api/cli.py
+++ b/openpype/hosts/celaction/api/cli.py
@@ -14,7 +14,7 @@ from openpype.tools.utils import host_tools
from openpype.pipeline import install_openpype_plugins
-log = Logger().get_logger("Celaction_cli_publisher")
+log = Logger.get_logger("Celaction_cli_publisher")
publish_host = "celaction"
diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py
index 54002f9f51..54a6c94b60 100644
--- a/openpype/hosts/fusion/api/pipeline.py
+++ b/openpype/hosts/fusion/api/pipeline.py
@@ -8,7 +8,7 @@ import contextlib
import pyblish.api
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
@@ -20,7 +20,7 @@ from openpype.pipeline import (
)
import openpype.hosts.fusion
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
diff --git a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py
index de8fc4b3b4..870e74280a 100644
--- a/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py
+++ b/openpype/hosts/fusion/utility_scripts/__OpenPype_Menu__.py
@@ -1,14 +1,12 @@
import os
import sys
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.pipeline import (
install_host,
registered_host,
)
-log = Logger().get_logger(__name__)
-
def main(env):
from openpype.hosts.fusion import api
@@ -17,6 +15,7 @@ def main(env):
# activate resolve from pype
install_host(api)
+ log = Logger.get_logger(__name__)
log.info(f"Registered host: {registered_host()}")
menu.launch_openpype_menu()
diff --git a/openpype/hosts/harmony/__init__.py b/openpype/hosts/harmony/__init__.py
index d2f710d83d..9177eaa285 100644
--- a/openpype/hosts/harmony/__init__.py
+++ b/openpype/hosts/harmony/__init__.py
@@ -1,11 +1,10 @@
-import os
+from .addon import (
+ HARMONY_HOST_DIR,
+ HarmonyAddon,
+)
-def add_implementation_envs(env, _app):
- """Modify environments to contain all required for implementation."""
- openharmony_path = os.path.join(
- os.environ["OPENPYPE_REPOS_ROOT"], "openpype", "hosts",
- "harmony", "vendor", "OpenHarmony"
- )
- # TODO check if is already set? What to do if is already set?
- env["LIB_OPENHARMONY_PATH"] = openharmony_path
+__all__ = (
+ "HARMONY_HOST_DIR",
+ "HarmonyAddon",
+)
diff --git a/openpype/hosts/harmony/addon.py b/openpype/hosts/harmony/addon.py
new file mode 100644
index 0000000000..b051d68abb
--- /dev/null
+++ b/openpype/hosts/harmony/addon.py
@@ -0,0 +1,24 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class HarmonyAddon(OpenPypeModule, IHostModule):
+ name = "harmony"
+ host_name = "harmony"
+
+ def initialize(self, module_settings):
+ self.enabled = True
+
+ def add_implementation_envs(self, env, _app):
+ """Modify environments to contain all required for implementation."""
+ openharmony_path = os.path.join(
+ HARMONY_HOST_DIR, "vendor", "OpenHarmony"
+ )
+ # TODO check if is already set? What to do if is already set?
+ env["LIB_OPENHARMONY_PATH"] = openharmony_path
+
+ def get_workfile_extensions(self):
+ return [".zip"]
diff --git a/openpype/hosts/harmony/api/pipeline.py b/openpype/hosts/harmony/api/pipeline.py
index 4d71b9380d..4b9849c190 100644
--- a/openpype/hosts/harmony/api/pipeline.py
+++ b/openpype/hosts/harmony/api/pipeline.py
@@ -14,14 +14,14 @@ from openpype.pipeline import (
)
from openpype.pipeline.load import get_outdated_containers
from openpype.pipeline.context_tools import get_current_project_asset
-import openpype.hosts.harmony
+
+from openpype.hosts.harmony import HARMONY_HOST_DIR
import openpype.hosts.harmony.api as harmony
log = logging.getLogger("openpype.hosts.harmony")
-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.harmony.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(HARMONY_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
diff --git a/openpype/hosts/harmony/api/workio.py b/openpype/hosts/harmony/api/workio.py
index ab1cb9b1a9..8df5ede917 100644
--- a/openpype/hosts/harmony/api/workio.py
+++ b/openpype/hosts/harmony/api/workio.py
@@ -2,8 +2,6 @@
import os
import shutil
-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
-
from .lib import (
ProcessContext,
get_local_harmony_path,
@@ -16,7 +14,7 @@ save_disabled = False
def file_extensions():
- return HOST_WORKFILE_EXTENSIONS["harmony"]
+ return [".zip"]
def has_unsaved_changes():
diff --git a/openpype/hosts/hiero/api/events.py b/openpype/hosts/hiero/api/events.py
index 59fd278a81..862a2607c1 100644
--- a/openpype/hosts/hiero/api/events.py
+++ b/openpype/hosts/hiero/api/events.py
@@ -1,7 +1,6 @@
import os
import hiero.core.events
-from openpype.api import Logger
-from openpype.lib import register_event_callback
+from openpype.lib import Logger, register_event_callback
from .lib import (
sync_avalon_data_to_workfile,
launch_workfiles_app,
@@ -11,7 +10,7 @@ from .lib import (
from .tags import add_tags_to_workfile
from .menu import update_menu_task_label
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
def startupCompleted(event):
diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py
index 2f66f3ddd7..e288cea2b1 100644
--- a/openpype/hosts/hiero/api/lib.py
+++ b/openpype/hosts/hiero/api/lib.py
@@ -21,7 +21,7 @@ from openpype.client import (
)
from openpype.settings import get_anatomy_settings
from openpype.pipeline import legacy_io, Anatomy
-from openpype.api import Logger
+from openpype.lib import Logger
from . import tags
try:
@@ -34,7 +34,7 @@ except ImportError:
# from opentimelineio import opentime
# from pprint import pformat
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
self = sys.modules[__name__]
self._has_been_setup = False
diff --git a/openpype/hosts/hiero/api/pipeline.py b/openpype/hosts/hiero/api/pipeline.py
index b243a38b06..dacfd338bb 100644
--- a/openpype/hosts/hiero/api/pipeline.py
+++ b/openpype/hosts/hiero/api/pipeline.py
@@ -6,7 +6,7 @@ import contextlib
from collections import OrderedDict
from pyblish import api as pyblish
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.pipeline import (
schema,
register_creator_plugin_path,
@@ -18,7 +18,7 @@ from openpype.pipeline import (
from openpype.tools.utils import host_tools
from . import lib, menu, events
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
# plugin paths
API_DIR = os.path.dirname(os.path.abspath(__file__))
diff --git a/openpype/hosts/hiero/api/plugin.py b/openpype/hosts/hiero/api/plugin.py
index 28a9dfb492..77fedbbbdc 100644
--- a/openpype/hosts/hiero/api/plugin.py
+++ b/openpype/hosts/hiero/api/plugin.py
@@ -9,11 +9,12 @@ from Qt import QtWidgets, QtCore
import qargparse
import openpype.api as openpype
+from openpype.lib import Logger
from openpype.pipeline import LoaderPlugin, LegacyCreator
from openpype.pipeline.context_tools import get_current_project_asset
from . import lib
-log = openpype.Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
def load_stylesheet():
diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py
index e50ebfccad..39d821f620 100644
--- a/openpype/hosts/maya/api/plugin.py
+++ b/openpype/hosts/maya/api/plugin.py
@@ -4,6 +4,7 @@ from maya import cmds
import qargparse
+from openpype.lib import Logger
from openpype.pipeline import (
LegacyCreator,
LoaderPlugin,
@@ -50,9 +51,7 @@ def get_reference_node(members, log=None):
# Warn the user when we're taking the highest reference node
if len(references) > 1:
if not log:
- from openpype.lib import PypeLogger
-
- log = PypeLogger().get_logger(__name__)
+ log = Logger.get_logger(__name__)
log.warning("More than one reference node found in "
"container, using highest reference node: "
diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py
index b126de4ceb..962f31c177 100644
--- a/openpype/hosts/nuke/api/__init__.py
+++ b/openpype/hosts/nuke/api/__init__.py
@@ -26,8 +26,8 @@ from .lib import (
maintained_selection,
reset_selection,
get_view_process_node,
- duplicate_node
-
+ duplicate_node,
+ convert_knob_value_to_correct_type
)
from .utils import (
@@ -59,6 +59,7 @@ __all__ = (
"reset_selection",
"get_view_process_node",
"duplicate_node",
+ "convert_knob_value_to_correct_type",
"colorspace_exists_on_node",
"get_colorspace_list"
diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py
index a53d932db1..b14f1a1eb1 100644
--- a/openpype/hosts/nuke/api/lib.py
+++ b/openpype/hosts/nuke/api/lib.py
@@ -1593,28 +1593,35 @@ def set_node_knobs_from_settings(node, knob_settings, **kwargs):
if not knob_value:
continue
- # first convert string types to string
- # just to ditch unicode
- if isinstance(knob_value, six.text_type):
- knob_value = str(knob_value)
-
- # set correctly knob types
- if knob_type == "bool":
- knob_value = bool(knob_value)
- elif knob_type == "decimal_number":
- knob_value = float(knob_value)
- elif knob_type == "number":
- knob_value = int(knob_value)
- elif knob_type == "text":
- knob_value = knob_value
- elif knob_type == "color_gui":
- knob_value = color_gui_to_int(knob_value)
- elif knob_type in ["2d_vector", "3d_vector", "color"]:
- knob_value = [float(v) for v in knob_value]
+ knob_value = convert_knob_value_to_correct_type(
+ knob_type, knob_value)
node[knob_name].setValue(knob_value)
+def convert_knob_value_to_correct_type(knob_type, knob_value):
+ # first convert string types to string
+ # just to ditch unicode
+ if isinstance(knob_value, six.text_type):
+ knob_value = str(knob_value)
+
+ # set correctly knob types
+ if knob_type == "bool":
+ knob_value = bool(knob_value)
+ elif knob_type == "decimal_number":
+ knob_value = float(knob_value)
+ elif knob_type == "number":
+ knob_value = int(knob_value)
+ elif knob_type == "text":
+ knob_value = knob_value
+ elif knob_type == "color_gui":
+ knob_value = color_gui_to_int(knob_value)
+ elif knob_type in ["2d_vector", "3d_vector", "color"]:
+ knob_value = [float(v) for v in knob_value]
+
+ return knob_value
+
+
def color_gui_to_int(color_gui):
hex_value = (
"0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui)
@@ -1945,15 +1952,25 @@ class WorkfileSettings(object):
if not write_node:
return
- # write all knobs to node
- for knob in nuke_imageio_writes["knobs"]:
- value = knob["value"]
- if isinstance(value, six.text_type):
- value = str(value)
- if str(value).startswith("0x"):
- value = int(value, 16)
+ try:
+ # write all knobs to node
+ for knob in nuke_imageio_writes["knobs"]:
+ value = knob["value"]
+ if isinstance(value, six.text_type):
+ value = str(value)
+ if str(value).startswith("0x"):
+ value = int(value, 16)
- write_node[knob["name"]].setValue(value)
+ log.debug("knob: {}| value: {}".format(
+ knob["name"], value
+ ))
+ write_node[knob["name"]].setValue(value)
+ except TypeError:
+ log.warning(
+ "Legacy workflow didnt work, switching to current")
+
+ set_node_knobs_from_settings(
+ write_node, nuke_imageio_writes["knobs"])
def set_reads_colorspace(self, read_clrs_inputs):
""" Setting colorspace to Read nodes
@@ -2010,12 +2027,14 @@ class WorkfileSettings(object):
# get imageio
nuke_colorspace = get_nuke_imageio_settings()
+ log.info("Setting colorspace to workfile...")
try:
self.set_root_colorspace(nuke_colorspace["workfile"])
except AttributeError:
msg = "set_colorspace(): missing `workfile` settings in template"
nuke.message(msg)
+ log.info("Setting colorspace to viewers...")
try:
self.set_viewers_colorspace(nuke_colorspace["viewer"])
except AttributeError:
@@ -2023,24 +2042,18 @@ class WorkfileSettings(object):
nuke.message(msg)
log.error(msg)
+ log.info("Setting colorspace to write nodes...")
try:
self.set_writes_colorspace()
except AttributeError as _error:
nuke.message(_error)
log.error(_error)
+ log.info("Setting colorspace to read nodes...")
read_clrs_inputs = nuke_colorspace["regexInputs"].get("inputs", [])
if read_clrs_inputs:
self.set_reads_colorspace(read_clrs_inputs)
- try:
- for key in nuke_colorspace:
- log.debug("Preset's colorspace key: {}".format(key))
- except TypeError:
- msg = "Nuke is not in templates! Contact your supervisor!"
- nuke.message(msg)
- log.error(msg)
-
def reset_frame_range_handles(self):
"""Set frame range to current asset"""
@@ -2227,10 +2240,9 @@ def get_write_node_template_attr(node):
subset=avalon_knob_data["subset"]
)
+
# collecting correct data
- correct_data = OrderedDict({
- "file": get_render_path(node)
- })
+ correct_data = OrderedDict()
# adding imageio knob presets
for k, v in nuke_imageio_writes.items():
diff --git a/openpype/hosts/nuke/plugins/load/actions.py b/openpype/hosts/nuke/plugins/load/actions.py
index d364a4f3a1..69f56c7305 100644
--- a/openpype/hosts/nuke/plugins/load/actions.py
+++ b/openpype/hosts/nuke/plugins/load/actions.py
@@ -2,10 +2,10 @@
"""
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.pipeline import load
-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)
class SetFrameRangeLoader(load.LoaderPlugin):
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml
new file mode 100644
index 0000000000..1097909a5f
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_asset_name.xml
@@ -0,0 +1,18 @@
+
+
+
+ Shot/Asset name
+
+## Invalid Shot/Asset name in subset
+
+Following Node with name `{node_name}`:
+Is in context of `{correct_name}` but Node _asset_ knob is set as `{wrong_name}`.
+
+### How to repair?
+
+1. Either use Repair or Select button.
+2. If you chose Select then rename asset knob to correct name.
+3. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml
new file mode 100644
index 0000000000..ab1b650773
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_backdrop.xml
@@ -0,0 +1,36 @@
+
+
+
+ Found multiple outputs
+
+## Invalid output amount
+
+Backdrop has more than one outgoing connection.
+
+### How to repair?
+
+1. Use button `Center node in node graph` and navigate to the backdrop.
+2. Reorganize nodes the way only one outgoing connection is present.
+3. Hit reload button on the publisher.
+
+
+### How could this happen?
+
+More than one node found above the backdrop is linked downstream, or a node has multiple output connections that are linked downstream.
+
+
+
+ Empty backdrop
+
+## Invalid empty backdrop
+
+Backdrop is empty and no nodes are found above it.
+
+### How to repair?
+
+1. Use button `Center node in node graph` and navigate to the backdrop.
+2. Add any node above it or delete it.
+3. Hit reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml
new file mode 100644
index 0000000000..f39a41a4f9
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_gizmo.xml
@@ -0,0 +1,36 @@
+
+
+
+ Found multiple outputs
+
+## Invalid amount of Output nodes
+
+Group node `{node_name}` has more than one Output node.
+
+### How to repair?
+
+1. Use button `Open Group`.
+2. Remove redundant Output node.
+3. Hit reload button on the publisher.
+
+
+### How could this happen?
+
+Perhaps you accidentally created more than one Output node.
+
+
+
+ Missing Input nodes
+
+## Missing Input nodes
+
+Make sure there is at least one connected Input node inside the group node with name `{node_name}`
+
+### How to repair?
+
+1. Use button `Open Group`.
+2. Add at least one Input node and connect to other nodes.
+3. Hit reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml
new file mode 100644
index 0000000000..76c184f653
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_knobs.xml
@@ -0,0 +1,18 @@
+
+
+
+ Knobs value
+
+## Invalid node's knobs values
+
+Following node knobs need to be repaired:
+
+{invalid_items}
+
+### How to repair?
+
+1. Use Repair button.
+2. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml
new file mode 100644
index 0000000000..08a88a993e
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_output_resolution.xml
@@ -0,0 +1,16 @@
+
+
+
+ Output format
+
+## Invalid format setting
+
+Either the Reformat node inside of the render group is missing or the Reformat node output format knob is not set to `root.format`.
+
+### How to repair?
+
+1. Use Repair button.
+2. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml
new file mode 100644
index 0000000000..6fe5d5d43e
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_proxy_mode.xml
@@ -0,0 +1,16 @@
+
+
+
+ Proxy mode
+
+## Invalid proxy mode value
+
+Nuke is set to use Proxy. This is not supported by publisher.
+
+### How to repair?
+
+1. Use Repair button.
+2. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml
new file mode 100644
index 0000000000..434081c269
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_rendered_frames.xml
@@ -0,0 +1,17 @@
+
+
+
+ Rendered Frames
+
+## Missing Rendered Frames
+
+Render node "{node_name}" is set to "Use existing frames", but frames are missing.
+
+### How to repair?
+
+1. Use Repair button.
+2. Set different target.
+3. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml
new file mode 100644
index 0000000000..871fc629ce
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_script_attributes.xml
@@ -0,0 +1,18 @@
+
+
+
+ Script attributes
+
+## Invalid Script attributes
+
+Following script root attributes need to be fixed:
+
+{failed_attributes}
+
+### How to repair?
+
+1. Use Repair.
+2. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml
new file mode 100644
index 0000000000..cdf85102bc
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/help/validate_write_nodes.xml
@@ -0,0 +1,18 @@
+
+
+
+ Knobs values
+
+## Invalid node's knobs values
+
+Following write node knobs need to be repaired:
+
+{xml_msg}
+
+### How to repair?
+
+1. Use Repair button.
+2. Hit Reload button on the publisher.
+
+
+
\ No newline at end of file
diff --git a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py
index 7349a8f424..822f405a6f 100644
--- a/openpype/hosts/nuke/plugins/publish/precollect_workfile.py
+++ b/openpype/hosts/nuke/plugins/publish/precollect_workfile.py
@@ -8,6 +8,7 @@ from openpype.hosts.nuke.api.lib import (
add_publish_knob,
get_avalon_knob_data
)
+from openpype.pipeline import KnownPublishError
class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -22,6 +23,12 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
current_file = os.path.normpath(nuke.root().name())
+ if current_file.lower() == "root":
+ raise KnownPublishError(
+ "Workfile is not correct file name. \n"
+ "Use workfile tool to manage the name correctly."
+ )
+
knob_data = get_avalon_knob_data(root)
add_publish_knob(root)
diff --git a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py
similarity index 74%
rename from openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py
rename to openpype/hosts/nuke/plugins/publish/validate_asset_name.py
index 842f74b6f6..7647471f8a 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_instance_in_context.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_asset_name.py
@@ -3,20 +3,17 @@
from __future__ import absolute_import
import nuke
-
import pyblish.api
import openpype.api
-from openpype.hosts.nuke.api.lib import (
- recreate_instance,
- reset_selection,
- select_nodes
-)
+import openpype.hosts.nuke.api.lib as nlib
+import openpype.hosts.nuke.api as nuke_api
+from openpype.pipeline import PublishXmlValidationError
class SelectInvalidInstances(pyblish.api.Action):
"""Select invalid instances in Outliner."""
- label = "Select Instances"
+ label = "Select"
icon = "briefcase"
on = "failed"
@@ -39,6 +36,7 @@ class SelectInvalidInstances(pyblish.api.Action):
instances = pyblish.api.instances_by_plugin(failed, plugin)
if instances:
+ self.deselect()
self.log.info(
"Selecting invalid nodes: %s" % ", ".join(
[str(x) for x in instances]
@@ -50,12 +48,12 @@ class SelectInvalidInstances(pyblish.api.Action):
self.deselect()
def select(self, instances):
- select_nodes(
+ nlib.select_nodes(
[nuke.toNode(str(x)) for x in instances]
)
def deselect(self):
- reset_selection()
+ nlib.reset_selection()
class RepairSelectInvalidInstances(pyblish.api.Action):
@@ -85,12 +83,12 @@ class RepairSelectInvalidInstances(pyblish.api.Action):
context_asset = context.data["assetEntity"]["name"]
for instance in instances:
origin_node = instance[0]
- recreate_instance(
+ nuke_api.lib.recreate_instance(
origin_node, avalon_data={"asset": context_asset}
)
-class ValidateInstanceInContext(pyblish.api.InstancePlugin):
+class ValidateCorrectAssetName(pyblish.api.InstancePlugin):
"""Validator to check if instance asset match context asset.
When working in per-shot style you always publish data in context of
@@ -99,15 +97,31 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin):
Action on this validator will select invalid instances in Outliner.
"""
-
order = openpype.api.ValidateContentsOrder
- label = "Instance in same Context"
+ label = "Validate correct asset name"
hosts = ["nuke"]
- actions = [SelectInvalidInstances, RepairSelectInvalidInstances]
+ actions = [
+ SelectInvalidInstances,
+ RepairSelectInvalidInstances
+ ]
optional = True
def process(self, instance):
asset = instance.data.get("asset")
context_asset = instance.context.data["assetEntity"]["name"]
- msg = "{} has asset {}".format(instance.name, asset)
- assert asset == context_asset, msg
+
+ msg = (
+ "Instance `{}` has wrong shot/asset name:\n"
+ "Correct: `{}` | Wrong: `{}`").format(
+ instance.name, asset, context_asset)
+
+ self.log.debug(msg)
+
+ if asset != context_asset:
+ raise PublishXmlValidationError(
+ self, msg, formatting_data={
+ "node_name": instance[0]["name"].value(),
+ "wrong_name": asset,
+ "correct_name": context_asset
+ }
+ )
diff --git a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py
index e2843d146e..17dc79dc56 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_backdrop.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_backdrop.py
@@ -1,6 +1,7 @@
import nuke
import pyblish
from openpype.hosts.nuke.api.lib import maintained_selection
+from openpype.pipeline import PublishXmlValidationError
class SelectCenterInNodeGraph(pyblish.api.Action):
@@ -47,8 +48,9 @@ class SelectCenterInNodeGraph(pyblish.api.Action):
@pyblish.api.log
class ValidateBackdrop(pyblish.api.InstancePlugin):
- """Validate amount of nodes on backdrop node in case user
- forgotten to add nodes above the publishing backdrop node"""
+ """ Validate amount of nodes on backdrop node in case user
+    forgotten to add nodes above the publishing backdrop node.
+ """
order = pyblish.api.ValidatorOrder
optional = True
@@ -63,8 +65,25 @@ class ValidateBackdrop(pyblish.api.InstancePlugin):
msg_multiple_outputs = (
"Only one outcoming connection from "
"\"{}\" is allowed").format(instance.data["name"])
- assert len(connections_out.keys()) <= 1, msg_multiple_outputs
- msg_no_content = "No content on backdrop node: \"{}\"".format(
+ if len(connections_out.keys()) > 1:
+ raise PublishXmlValidationError(
+ self,
+ msg_multiple_outputs,
+ "multiple_outputs"
+ )
+
+ msg_no_nodes = "No content on backdrop node: \"{}\"".format(
instance.data["name"])
- assert len(instance) > 1, msg_no_content
+
+ self.log.debug(
+ "Amount of nodes on instance: {}".format(
+ len(instance))
+ )
+
+ if len(instance) == 1:
+ raise PublishXmlValidationError(
+ self,
+ msg_no_nodes,
+ "no_nodes"
+ )
diff --git a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py
index d0d930f50c..2321bd1fd4 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_gizmo.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_gizmo.py
@@ -1,6 +1,7 @@
-import nuke
import pyblish
-from openpype.hosts.nuke.api.lib import maintained_selection
+from openpype.pipeline import PublishXmlValidationError
+from openpype.hosts.nuke.api import maintained_selection
+import nuke
class OpenFailedGroupNode(pyblish.api.Action):
@@ -8,7 +9,7 @@ class OpenFailedGroupNode(pyblish.api.Action):
Centering failed instance node in node grap
"""
- label = "Open Gizmo in Node Graph"
+ label = "Open Group"
icon = "wrench"
on = "failed"
@@ -48,11 +49,23 @@ class ValidateGizmo(pyblish.api.InstancePlugin):
with grpn:
connections_out = nuke.allNodes('Output')
- msg_multiple_outputs = "Only one outcoming connection from "
- "\"{}\" is allowed".format(instance.data["name"])
- assert len(connections_out) <= 1, msg_multiple_outputs
+ msg_multiple_outputs = (
+ "Only one outcoming connection from "
+ "\"{}\" is allowed").format(instance.data["name"])
+
+ if len(connections_out) > 1:
+ raise PublishXmlValidationError(
+ self, msg_multiple_outputs, "multiple_outputs",
+ {"node_name": grpn["name"].value()}
+ )
connections_in = nuke.allNodes('Input')
- msg_missing_inputs = "At least one Input node has to be used in: "
- "\"{}\"".format(instance.data["name"])
- assert len(connections_in) >= 1, msg_missing_inputs
+ msg_missing_inputs = (
+ "At least one Input node has to be inside Group: "
+ "\"{}\"").format(instance.data["name"])
+
+ if len(connections_in) == 0:
+ raise PublishXmlValidationError(
+ self, msg_missing_inputs, "no_inputs",
+ {"node_name": grpn["name"].value()}
+ )
diff --git a/openpype/hosts/nuke/plugins/publish/validate_knobs.py b/openpype/hosts/nuke/plugins/publish/validate_knobs.py
index d290ff4541..e2b11892e5 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_knobs.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_knobs.py
@@ -1,7 +1,8 @@
import nuke
-
+import six
import pyblish.api
import openpype.api
+from openpype.pipeline import PublishXmlValidationError
class ValidateKnobs(pyblish.api.ContextPlugin):
@@ -27,11 +28,21 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
optional = True
def process(self, context):
-
invalid = self.get_invalid(context, compute=True)
if invalid:
- raise RuntimeError(
- "Found knobs with invalid values:\n{}".format(invalid)
+ invalid_items = [
+ (
+ "Node __{node_name}__ with knob _{label}_ "
+ "expecting _{expected}_, "
+ "but is set to _{current}_"
+ ).format(**i)
+ for i in invalid
+ ]
+ raise PublishXmlValidationError(
+ self,
+ "Found knobs with invalid values:\n{}".format(invalid),
+ formatting_data={
+ "invalid_items": "\n".join(invalid_items)}
)
@classmethod
@@ -54,15 +65,24 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
- families = list(set(families) & set(cls.knobs.keys()))
+
if not families:
continue
# Get all knobs to validate.
knobs = {}
for family in families:
+ # check if dot in family
+ if "." in family:
+ family = family.split(".")[0]
+
+ # avoid families not in settings
+ if family not in cls.knobs:
+ continue
+
+ # get presets of knobs
for preset in cls.knobs[family]:
- knobs.update({preset: cls.knobs[family][preset]})
+ knobs[preset] = cls.knobs[family][preset]
# Get invalid knobs.
nodes = []
@@ -71,8 +91,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
nodes.append(node)
if node.Class() == "Group":
node.begin()
- for i in nuke.allNodes():
- nodes.append(i)
+ nodes.extend(iter(nuke.allNodes()))
node.end()
for node in nodes:
@@ -84,6 +103,7 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
if node[knob].value() != expected:
invalid_knobs.append(
{
+ "node_name": node.name(),
"knob": node[knob],
"name": node[knob].name(),
"label": node[knob].label(),
@@ -99,7 +119,9 @@ class ValidateKnobs(pyblish.api.ContextPlugin):
def repair(cls, instance):
invalid = cls.get_invalid(instance)
for data in invalid:
- if isinstance(data["expected"], unicode):
+ # TODO: will need to improve type definitions
+ # with the new settings for knob types
+ if isinstance(data["expected"], six.text_type):
data["knob"].setValue(str(data["expected"]))
continue
diff --git a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py
index 27094b8d74..fc07e9b83b 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_output_resolution.py
@@ -1,43 +1,9 @@
-import nuke
import pyblish.api
-
-
-class RepairWriteResolutionDifference(pyblish.api.Action):
-
- label = "Repair"
- icon = "wrench"
- on = "failed"
-
- def process(self, context, plugin):
-
- # Get the errored instances
- failed = []
- for result in context.data["results"]:
- if (result["error"] is not None and result["instance"] is not None
- and result["instance"] not in failed):
- failed.append(result["instance"])
-
- # Apply pyblish.logic to get the instances for the plug-in
- instances = pyblish.api.instances_by_plugin(failed, plugin)
-
- for instance in instances:
- reformat = instance[0].dependencies()[0]
- if reformat.Class() != "Reformat":
- reformat = nuke.nodes.Reformat(inputs=[instance[0].input(0)])
-
- xpos = instance[0].xpos()
- ypos = instance[0].ypos() - 26
-
- dependent_ypos = instance[0].dependencies()[0].ypos()
- if (instance[0].ypos() - dependent_ypos) <= 51:
- xpos += 110
-
- reformat.setXYpos(xpos, ypos)
-
- instance[0].setInput(0, reformat)
-
- reformat["resize"].setValue("none")
+import openpype.api
+from openpype.hosts.nuke.api import maintained_selection
+from openpype.pipeline import PublishXmlValidationError
+import nuke
class ValidateOutputResolution(pyblish.api.InstancePlugin):
@@ -52,27 +18,75 @@ class ValidateOutputResolution(pyblish.api.InstancePlugin):
families = ["render", "render.local", "render.farm"]
label = "Write Resolution"
hosts = ["nuke"]
- actions = [RepairWriteResolutionDifference]
+ actions = [openpype.api.RepairAction]
+
+ missing_msg = "Missing Reformat node in render group node"
+ resolution_msg = "Reformat is set to wrong format"
def process(self, instance):
+ invalid = self.get_invalid(instance)
+ if invalid:
+ raise PublishXmlValidationError(self, invalid)
- # Skip bounding box check if a reformat node exists.
- if instance[0].dependencies()[0].Class() == "Reformat":
- return
+ @classmethod
+ def get_reformat(cls, instance):
+ reformat = None
+ for inode in instance:
+ if inode.Class() != "Reformat":
+ continue
+ reformat = inode
- msg = "Bounding box is outside the format."
- assert self.check_resolution(instance), msg
+ return reformat
- def check_resolution(self, instance):
- node = instance[0]
+ @classmethod
+ def get_invalid(cls, instance):
+ def _check_resolution(instance, reformat):
+ root_width = instance.data["resolutionWidth"]
+ root_height = instance.data["resolutionHeight"]
- root_width = instance.data["resolutionWidth"]
- root_height = instance.data["resolutionHeight"]
+ write_width = reformat.format().width()
+ write_height = reformat.format().height()
- write_width = node.format().width()
- write_height = node.format().height()
+ if (root_width != write_width) or (root_height != write_height):
+ return None
+ else:
+ return True
- if (root_width != write_width) or (root_height != write_height):
- return None
- else:
- return True
+ # check if reformat is in render node
+ reformat = cls.get_reformat(instance)
+ if not reformat:
+ return cls.missing_msg
+
+ # check if reformat is set to correct root format
+ correct_format = _check_resolution(instance, reformat)
+ if not correct_format:
+ return cls.resolution_msg
+
+ @classmethod
+ def repair(cls, instance):
+ invalid = cls.get_invalid(instance)
+ grp_node = instance[0]
+
+ if cls.missing_msg == invalid:
+ # make sure we are inside of the group node
+ with grp_node:
+ # find input node and select it
+ _input = None
+ for inode in instance:
+ if inode.Class() != "Input":
+ continue
+ _input = inode
+
+ # add reformat node under it
+ with maintained_selection():
+ _input['selected'].setValue(True)
+ _rfn = nuke.createNode("Reformat", "name Reformat01")
+ _rfn["resize"].setValue(0)
+ _rfn["black_outside"].setValue(1)
+
+ cls.log.info("I am adding reformat node")
+
+ if cls.resolution_msg == invalid:
+ reformat = cls.get_reformat(instance)
+ reformat["format"].setValue(nuke.root()["format"].value())
+ cls.log.info("I am fixing reformat to root.format")
diff --git a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py
index 9c6ca03ffd..dac240ad19 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py
@@ -1,5 +1,6 @@
import pyblish
import nuke
+from openpype.pipeline import PublishXmlValidationError
class FixProxyMode(pyblish.api.Action):
@@ -7,7 +8,7 @@ class FixProxyMode(pyblish.api.Action):
Togger off proxy switch OFF
"""
- label = "Proxy toggle to OFF"
+ label = "Repair"
icon = "wrench"
on = "failed"
@@ -30,4 +31,7 @@ class ValidateProxyMode(pyblish.api.ContextPlugin):
rootNode = nuke.root()
isProxy = rootNode["proxy"].value()
- assert not isProxy, "Proxy mode should be toggled OFF"
+ if isProxy:
+ raise PublishXmlValidationError(
+ self, "Proxy mode should be toggled OFF"
+ )
diff --git a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py
index 5f7b1f3806..237ff423e5 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_rendered_frames.py
@@ -1,7 +1,7 @@
import os
import pyblish.api
-from openpype.api import ValidationException
import clique
+from openpype.pipeline import PublishXmlValidationError
@pyblish.api.log
@@ -36,7 +36,7 @@ class RepairActionBase(pyblish.api.Action):
class RepairCollectionActionToLocal(RepairActionBase):
- label = "Repair > rerender with `Local` machine"
+ label = "Repair - rerender with \"Local\""
def process(self, context, plugin):
instances = self.get_instance(context, plugin)
@@ -44,7 +44,7 @@ class RepairCollectionActionToLocal(RepairActionBase):
class RepairCollectionActionToFarm(RepairActionBase):
- label = "Repair > rerender `On farm` with remote machines"
+ label = "Repair - rerender with \"On farm\""
def process(self, context, plugin):
instances = self.get_instance(context, plugin)
@@ -63,6 +63,10 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
def process(self, instance):
+ f_data = {
+ "node_name": instance[0]["name"].value()
+ }
+
for repre in instance.data["representations"]:
if not repre.get("files"):
@@ -71,7 +75,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
"Check properties of write node (group) and"
"select 'Local' option in 'Publish' dropdown.")
self.log.error(msg)
- raise ValidationException(msg)
+ raise PublishXmlValidationError(
+ self, msg, formatting_data=f_data)
if isinstance(repre["files"], str):
return
@@ -82,21 +87,23 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
collection = collections[0]
- fstartH = instance.data["frameStartHandle"]
- fendH = instance.data["frameEndHandle"]
+ f_start_h = instance.data["frameStartHandle"]
+ f_end_h = instance.data["frameEndHandle"]
- frame_length = int(fendH - fstartH + 1)
+ frame_length = int(f_end_h - f_start_h + 1)
if frame_length != 1:
if len(collections) != 1:
msg = "There are multiple collections in the folder"
self.log.error(msg)
- raise ValidationException(msg)
+ raise PublishXmlValidationError(
+ self, msg, formatting_data=f_data)
if not collection.is_contiguous():
msg = "Some frames appear to be missing"
self.log.error(msg)
- raise ValidationException(msg)
+ raise PublishXmlValidationError(
+ self, msg, formatting_data=f_data)
collected_frames_len = len(collection.indexes)
coll_start = min(collection.indexes)
@@ -105,7 +112,8 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
self.log.info("frame_length: {}".format(frame_length))
self.log.info("collected_frames_len: {}".format(
collected_frames_len))
- self.log.info("fstartH-fendH: {}-{}".format(fstartH, fendH))
+ self.log.info("f_start_h-f_end_h: {}-{}".format(
+ f_start_h, f_end_h))
self.log.info(
"coll_start-coll_end: {}-{}".format(coll_start, coll_end))
@@ -116,13 +124,19 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
if ("slate" in instance.data["families"]) \
and (frame_length != collected_frames_len):
collected_frames_len -= 1
- fstartH += 1
+ f_start_h += 1
- assert ((collected_frames_len >= frame_length)
- and (coll_start <= fstartH)
- and (coll_end >= fendH)), (
- "{} missing frames. Use repair to render all frames"
- ).format(__name__)
+ if (
+                collected_frames_len < frame_length
+                or coll_start > f_start_h
+                or coll_end < f_end_h
+ ):
+ raise PublishXmlValidationError(
+ self, (
+ "{} missing frames. Use repair to "
+ "render all frames"
+ ).format(__name__), formatting_data=f_data
+ )
instance.data["collection"] = collection
diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py
deleted file mode 100644
index b8d7494b9d..0000000000
--- a/openpype/hosts/nuke/plugins/publish/validate_script.py
+++ /dev/null
@@ -1,156 +0,0 @@
-import pyblish.api
-
-from openpype.client import get_project, get_asset_by_id, get_asset_by_name
-from openpype.pipeline import legacy_io
-
-
-@pyblish.api.log
-class ValidateScript(pyblish.api.InstancePlugin):
- """ Validates file output. """
-
- order = pyblish.api.ValidatorOrder + 0.1
- families = ["workfile"]
- label = "Check script settings"
- hosts = ["nuke"]
- optional = True
-
- def process(self, instance):
- ctx_data = instance.context.data
- project_name = legacy_io.active_project()
- asset_name = ctx_data["asset"]
- # TODO repace query with using 'instance.data["assetEntity"]'
- asset = get_asset_by_name(project_name, asset_name)
- asset_data = asset["data"]
-
- # These attributes will be checked
- attributes = [
- "fps",
- "frameStart",
- "frameEnd",
- "resolutionWidth",
- "resolutionHeight",
- "handleStart",
- "handleEnd"
- ]
-
- # Value of these attributes can be found on parents
- hierarchical_attributes = [
- "fps",
- "resolutionWidth",
- "resolutionHeight",
- "pixelAspect",
- "handleStart",
- "handleEnd"
- ]
-
- missing_attributes = []
- asset_attributes = {}
- for attr in attributes:
- if attr in asset_data:
- asset_attributes[attr] = asset_data[attr]
-
- elif attr in hierarchical_attributes:
- # TODO this should be probably removed
- # Hierarchical attributes is not a thing since Pype 2?
-
- # Try to find attribute on parent
- parent_id = asset['parent']
- parent_type = "project"
- if asset_data['visualParent'] is not None:
- parent_type = "asset"
- parent_id = asset_data['visualParent']
-
- value = self.check_parent_hierarchical(
- project_name, parent_type, parent_id, attr
- )
- if value is None:
- missing_attributes.append(attr)
- else:
- asset_attributes[attr] = value
- else:
- missing_attributes.append(attr)
-
- # Raise error if attributes weren't found on asset in database
- if len(missing_attributes) > 0:
- atr = ", ".join(missing_attributes)
- msg = 'Missing attributes "{}" in asset "{}"'
- message = msg.format(atr, asset_name)
- raise ValueError(message)
-
- # Get handles from database, Default is 0 (if not found)
- handle_start = 0
- handle_end = 0
- if "handleStart" in asset_attributes:
- handle_start = asset_attributes["handleStart"]
- if "handleEnd" in asset_attributes:
- handle_end = asset_attributes["handleEnd"]
-
- asset_attributes["fps"] = float("{0:.4f}".format(
- asset_attributes["fps"]))
-
- # Get values from nukescript
- script_attributes = {
- "handleStart": ctx_data["handleStart"],
- "handleEnd": ctx_data["handleEnd"],
- "fps": float("{0:.4f}".format(ctx_data["fps"])),
- "frameStart": ctx_data["frameStart"],
- "frameEnd": ctx_data["frameEnd"],
- "resolutionWidth": ctx_data["resolutionWidth"],
- "resolutionHeight": ctx_data["resolutionHeight"],
- "pixelAspect": ctx_data["pixelAspect"]
- }
-
- # Compare asset's values Nukescript X Database
- not_matching = []
- for attr in attributes:
- self.log.debug("asset vs script attribute \"{}\": {}, {}".format(
- attr, asset_attributes[attr], script_attributes[attr])
- )
- if asset_attributes[attr] != script_attributes[attr]:
- not_matching.append(attr)
-
- # Raise error if not matching
- if len(not_matching) > 0:
- msg = "Attributes '{}' are not set correctly"
- # Alert user that handles are set if Frame start/end not match
- if (
- (("frameStart" in not_matching) or ("frameEnd" in not_matching)) and
- ((handle_start > 0) or (handle_end > 0))
- ):
- msg += " (`handle_start` are set to {})".format(handle_start)
- msg += " (`handle_end` are set to {})".format(handle_end)
- message = msg.format(", ".join(not_matching))
- raise ValueError(message)
-
- def check_parent_hierarchical(
- self, project_name, parent_type, parent_id, attr
- ):
- if parent_id is None:
- return None
-
- doc = None
- if parent_type == "project":
- doc = get_project(project_name)
- elif parent_type == "asset":
- doc = get_asset_by_id(project_name, parent_id)
-
- if not doc:
- return None
-
- doc_data = doc["data"]
- if attr in doc_data:
- self.log.info(attr)
- return doc_data[attr]
-
- if parent_type == "project":
- return None
-
- parent_id = doc_data.get("visualParent")
- new_parent_type = "asset"
- if parent_id is None:
- parent_id = doc["parent"]
- new_parent_type = "project"
-
- return self.check_parent_hierarchical(
- project_name, new_parent_type, parent_id, attr
- )
diff --git a/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py
new file mode 100644
index 0000000000..106d7a2524
--- /dev/null
+++ b/openpype/hosts/nuke/plugins/publish/validate_script_attributes.py
@@ -0,0 +1,127 @@
+from pprint import pformat
+import pyblish.api
+
+import openpype.api
+from openpype.pipeline import PublishXmlValidationError
+from openpype.hosts.nuke.api.lib import (
+ get_avalon_knob_data,
+ WorkfileSettings
+)
+import nuke
+
+
+@pyblish.api.log
+class ValidateScriptAttributes(pyblish.api.InstancePlugin):
+ """ Validates file output. """
+
+ order = pyblish.api.ValidatorOrder + 0.1
+ families = ["workfile"]
+    label = "Validate script attributes"
+ hosts = ["nuke"]
+ optional = True
+ actions = [openpype.api.RepairAction]
+
+ def process(self, instance):
+ root = nuke.root()
+ knob_data = get_avalon_knob_data(root)
+ asset = instance.data["assetEntity"]
+ # get asset data frame values
+ frame_start = asset["data"]["frameStart"]
+ frame_end = asset["data"]["frameEnd"]
+ handle_start = asset["data"]["handleStart"]
+ handle_end = asset["data"]["handleEnd"]
+
+ # These attributes will be checked
+ attributes = [
+ "fps",
+ "frameStart",
+ "frameEnd",
+ "resolutionWidth",
+ "resolutionHeight",
+ "handleStart",
+ "handleEnd"
+ ]
+
+ # get only defined attributes from asset data
+ asset_attributes = {
+ attr: asset["data"][attr]
+ for attr in attributes
+ if attr in asset["data"]
+ }
+    # fix float to max 4 digits (only for evaluating)
+ fps_data = float("{0:.4f}".format(
+ asset_attributes["fps"]))
+ # fix frame values to include handles
+ asset_attributes.update({
+ "frameStart": frame_start - handle_start,
+ "frameEnd": frame_end + handle_end,
+ "fps": fps_data
+ })
+
+ self.log.debug(pformat(
+ asset_attributes
+ ))
+
+ # Get format
+ _format = root["format"].value()
+
+ # Get values from nukescript
+ script_attributes = {
+ "handleStart": int(knob_data["handleStart"]),
+ "handleEnd": int(knob_data["handleEnd"]),
+ "fps": float("{0:.4f}".format(root['fps'].value())),
+ "frameStart": int(root["first_frame"].getValue()),
+ "frameEnd": int(root["last_frame"].getValue()),
+ "resolutionWidth": _format.width(),
+ "resolutionHeight": _format.height(),
+ "pixelAspect": _format.pixelAspect()
+ }
+ self.log.debug(pformat(
+ script_attributes
+ ))
+
+ # Compare asset's values Nukescript X Database
+ not_matching = []
+ for attr in attributes:
+ self.log.debug(
+ "Asset vs Script attribute \"{}\": {}, {}".format(
+ attr,
+ asset_attributes[attr],
+ script_attributes[attr]
+ )
+ )
+ if asset_attributes[attr] != script_attributes[attr]:
+ not_matching.append({
+ "name": attr,
+ "expected": asset_attributes[attr],
+ "actual": script_attributes[attr]
+ })
+
+ # Raise error if not matching
+ if not_matching:
+ msg = "Following attributes are not set correctly: \n{}"
+ attrs_wrong_str = "\n".join([
+ (
+ "`{0}` is set to `{1}`, "
+ "but should be set to `{2}`"
+ ).format(at["name"], at["actual"], at["expected"])
+ for at in not_matching
+ ])
+            attrs_wrong_html = "<br/>".join([
+ (
+ "-- __{0}__ is set to __{1}__, "
+ "but should be set to __{2}__"
+ ).format(at["name"], at["actual"], at["expected"])
+ for at in not_matching
+ ])
+ raise PublishXmlValidationError(
+ self, msg.format(attrs_wrong_str),
+ formatting_data={
+ "failed_attributes": attrs_wrong_html
+ }
+ )
+
+ @classmethod
+ def repair(cls, instance):
+ cls.log.debug("__ repairing instance: {}".format(instance))
+ WorkfileSettings().set_context_settings()
diff --git a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py
index c0d5c8f402..362ff31174 100644
--- a/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py
+++ b/openpype/hosts/nuke/plugins/publish/validate_write_nodes.py
@@ -1,10 +1,10 @@
-import os
import pyblish.api
-import openpype.utils
+from openpype.api import get_errored_instances_from_context
from openpype.hosts.nuke.api.lib import (
get_write_node_template_attr,
- get_node_path
+ set_node_knobs_from_settings
)
+from openpype.pipeline import PublishXmlValidationError
@pyblish.api.log
@@ -14,18 +14,29 @@ class RepairNukeWriteNodeAction(pyblish.api.Action):
icon = "wrench"
def process(self, context, plugin):
- instances = openpype.utils.filter_instances(context, plugin)
+ instances = get_errored_instances_from_context(context)
for instance in instances:
- node = instance[1]
- correct_data = get_write_node_template_attr(node)
- for k, v in correct_data.items():
- node[k].setValue(v)
+ write_group_node = instance[0]
+ # get write node from inside of group
+ write_node = None
+ for x in instance:
+ if x.Class() == "Write":
+ write_node = x
+
+ correct_data = get_write_node_template_attr(write_group_node)
+
+ set_node_knobs_from_settings(write_node, correct_data["knobs"])
+
self.log.info("Node attributes were fixed")
class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
- """ Validates file output. """
+ """ Validate Write node's knobs.
+
+ Compare knobs on write node inside the render group
+ with settings. At the moment supporting only `file` knob.
+ """
order = pyblish.api.ValidatorOrder
optional = True
@@ -35,38 +46,69 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
hosts = ["nuke"]
def process(self, instance):
+ write_group_node = instance[0]
- node = instance[1]
- correct_data = get_write_node_template_attr(node)
+ # get write node from inside of group
+ write_node = None
+ for x in instance:
+ if x.Class() == "Write":
+ write_node = x
+
+ if write_node is None:
+ return
+
+ correct_data = get_write_node_template_attr(write_group_node)
+
+ if correct_data:
+ check_knobs = correct_data["knobs"]
+ else:
+ return
check = []
- for k, v in correct_data.items():
- if k is 'file':
- padding = len(v.split('#'))
- ref_path = get_node_path(v, padding)
- n_path = get_node_path(node[k].value(), padding)
- isnt = False
- for i, p in enumerate(ref_path):
- if str(n_path[i]) not in str(p):
- if not isnt:
- isnt = True
- else:
- continue
- if isnt:
- check.append([k, v, node[k].value()])
+ self.log.debug("__ write_node: {}".format(
+ write_node
+ ))
+
+ for knob_data in check_knobs:
+ key = knob_data["name"]
+ value = knob_data["value"]
+ node_value = write_node[key].value()
+
+ # fix type differences
+ if type(node_value) in (int, float):
+ value = float(value)
+ node_value = float(node_value)
else:
- if str(node[k].value()) not in str(v):
- check.append([k, v, node[k].value()])
+ value = str(value)
+ node_value = str(node_value)
+
+ self.log.debug("__ key: {} | value: {}".format(
+ key, value
+ ))
+ if (
+ node_value != value
+ and key != "file"
+ and key != "tile_color"
+ ):
+ check.append([key, value, write_node[key].value()])
self.log.info(check)
- msg = "Node's attribute `{0}` is not correct!\n" \
- "\nCorrect: `{1}` \n\nWrong: `{2}` \n\n"
-
if check:
- print_msg = ""
- for item in check:
- print_msg += msg.format(item[0], item[1], item[2])
- print_msg += "`RMB` click to the validator and `A` to fix!"
+ self._make_error(check)
- assert not check, print_msg
+ def _make_error(self, check):
+ # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block
+ dbg_msg = "Write node's knobs values are not correct!\n"
+ msg_add = "Knob '{0}' > Correct: `{1}` > Wrong: `{2}`"
+
+ details = [
+ msg_add.format(item[0], item[1], item[2])
+ for item in check
+ ]
+        xml_msg = "<br/>".join(details)
+ dbg_msg += "\n\t".join(details)
+
+ raise PublishXmlValidationError(
+ self, dbg_msg, formatting_data={"xml_msg": xml_msg}
+ )
diff --git a/openpype/hosts/nuke/startup/clear_rendered.py b/openpype/hosts/nuke/startup/clear_rendered.py
index cf1d8ce170..744af71034 100644
--- a/openpype/hosts/nuke/startup/clear_rendered.py
+++ b/openpype/hosts/nuke/startup/clear_rendered.py
@@ -1,10 +1,11 @@
import os
-from openpype.api import Logger
-log = Logger().get_logger(__name__)
+from openpype.lib import Logger
def clear_rendered(dir_path):
+ log = Logger.get_logger(__name__)
+
for _f in os.listdir(dir_path):
_f_path = os.path.join(dir_path, _f)
log.info("Removing: `{}`".format(_f_path))
diff --git a/openpype/hosts/nuke/startup/write_to_read.py b/openpype/hosts/nuke/startup/write_to_read.py
index f5cf66b357..b7add40f47 100644
--- a/openpype/hosts/nuke/startup/write_to_read.py
+++ b/openpype/hosts/nuke/startup/write_to_read.py
@@ -2,8 +2,8 @@ import re
import os
import glob
import nuke
-from openpype.api import Logger
-log = Logger().get_logger(__name__)
+from openpype.lib import Logger
+log = Logger.get_logger(__name__)
SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v',
'm2v']
diff --git a/openpype/hosts/photoshop/__init__.py b/openpype/hosts/photoshop/__init__.py
index a91e0a65ff..773f73d624 100644
--- a/openpype/hosts/photoshop/__init__.py
+++ b/openpype/hosts/photoshop/__init__.py
@@ -1,9 +1,10 @@
-def add_implementation_envs(env, _app):
- """Modify environments to contain all required for implementation."""
- defaults = {
- "OPENPYPE_LOG_NO_COLORS": "True",
- "WEBSOCKET_URL": "ws://localhost:8099/ws/"
- }
- for key, value in defaults.items():
- if not env.get(key):
- env[key] = value
+from .addon import (
+ PhotoshopAddon,
+ PHOTOSHOP_HOST_DIR,
+)
+
+
+__all__ = (
+ "PhotoshopAddon",
+ "PHOTOSHOP_HOST_DIR",
+)
diff --git a/openpype/hosts/photoshop/addon.py b/openpype/hosts/photoshop/addon.py
new file mode 100644
index 0000000000..18899d4de8
--- /dev/null
+++ b/openpype/hosts/photoshop/addon.py
@@ -0,0 +1,26 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class PhotoshopAddon(OpenPypeModule, IHostModule):
+ name = "photoshop"
+ host_name = "photoshop"
+
+ def initialize(self, module_settings):
+ self.enabled = True
+
+ def add_implementation_envs(self, env, _app):
+ """Modify environments to contain all required for implementation."""
+ defaults = {
+ "OPENPYPE_LOG_NO_COLORS": "True",
+ "WEBSOCKET_URL": "ws://localhost:8099/ws/"
+ }
+ for key, value in defaults.items():
+ if not env.get(key):
+ env[key] = value
+
+ def get_workfile_extensions(self):
+ return [".psd", ".psb"]
diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py
index 2f57d64464..73a546604f 100644
--- a/openpype/hosts/photoshop/api/lib.py
+++ b/openpype/hosts/photoshop/api/lib.py
@@ -5,11 +5,10 @@ import traceback
from Qt import QtWidgets
-from openpype.api import Logger
+from openpype.lib import env_value_to_bool, Logger
+from openpype.modules import ModulesManager
from openpype.pipeline import install_host
from openpype.tools.utils import host_tools
-from openpype.lib.remote_publish import headless_publish
-from openpype.lib import env_value_to_bool
from .launch_logic import ProcessLauncher, stub
@@ -35,8 +34,10 @@ def main(*subprocess_args):
launcher.start()
if env_value_to_bool("HEADLESS_PUBLISH"):
+ manager = ModulesManager()
+ webpublisher_addon = manager["webpublisher"]
launcher.execute_in_main_thread(
- headless_publish,
+ webpublisher_addon.headless_publish,
log,
"ClosePS",
os.environ.get("IS_TEST")
diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py
index ee150d1808..f660096630 100644
--- a/openpype/hosts/photoshop/api/pipeline.py
+++ b/openpype/hosts/photoshop/api/pipeline.py
@@ -14,14 +14,13 @@ from openpype.pipeline import (
AVALON_CONTAINER_ID,
)
from openpype.pipeline.load import any_outdated_containers
-import openpype.hosts.photoshop
+from openpype.hosts.photoshop import PHOTOSHOP_HOST_DIR
from . import lib
log = Logger.get_logger(__name__)
-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.photoshop.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(PHOTOSHOP_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
diff --git a/openpype/hosts/photoshop/api/workio.py b/openpype/hosts/photoshop/api/workio.py
index 951c5dbfff..35b44d6070 100644
--- a/openpype/hosts/photoshop/api/workio.py
+++ b/openpype/hosts/photoshop/api/workio.py
@@ -1,7 +1,6 @@
"""Host API required Work Files tool"""
import os
-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
from . import lib
@@ -14,7 +13,7 @@ def _active_document():
def file_extensions():
- return HOST_WORKFILE_EXTENSIONS["photoshop"]
+ return [".psd", ".psb"]
def has_unsaved_changes():
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
index 2881ef0ea6..5d50a78914 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py
@@ -17,11 +17,11 @@ import os
import pyblish.api
-from openpype.lib.plugin_tools import (
- parse_json,
- get_batch_asset_task_info
-)
from openpype.pipeline import legacy_io
+from openpype_modules.webpublisher.lib import (
+ get_batch_asset_task_info,
+ parse_json
+)
class CollectBatchData(pyblish.api.ContextPlugin):
diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
index 71bd2cd854..c157c932fd 100644
--- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
+++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py
@@ -9,14 +9,22 @@ from openpype.settings import get_project_settings
class CollectColorCodedInstances(pyblish.api.ContextPlugin):
- """Creates instances for configured color code of a layer.
+ """Creates instances for layers marked by configurable color.
Used in remote publishing when artists marks publishable layers by color-
- coding.
+ coding. Top level layers (group) must be marked by specific color to be
+ published as an instance of 'image' family.
Can add group for all publishable layers to allow creation of flattened
image. (Cannot contain special background layer as it cannot be grouped!)
+ Based on value `create_flatten_image` from Settings:
+ - "yes": create flattened 'image' subset of all publishable layers + create
+ 'image' subset per publishable layer
+    - "flatten_only": create ONLY flattened 'image' subset of publishable layers
+ - "no": do not create flattened 'image' subset at all,
+ only separate subsets per marked layer.
+
Identifier:
id (str): "pyblish.avalon.instance"
"""
@@ -32,8 +40,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
# TODO check if could be set globally, probably doesn't make sense when
# flattened template cannot
subset_template_name = ""
- create_flatten_image = False
- # probably not possible to configure this globally
+ create_flatten_image = "no"
flatten_subset_template = ""
def process(self, context):
@@ -62,6 +69,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
publishable_layers = []
created_instances = []
+ family_from_settings = None
for layer in layers:
self.log.debug("Layer:: {}".format(layer))
if layer.parents:
@@ -80,6 +88,9 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
self.log.debug("!!! Not found family or template, skip")
continue
+ if not family_from_settings:
+ family_from_settings = resolved_family
+
fill_pairs = {
"variant": variant,
"family": resolved_family,
@@ -98,13 +109,16 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
"Subset {} already created, skipping.".format(subset))
continue
- instance = self._create_instance(context, layer, resolved_family,
- asset_name, subset, task_name)
+ if self.create_flatten_image != "flatten_only":
+ instance = self._create_instance(context, layer,
+ resolved_family,
+ asset_name, subset, task_name)
+ created_instances.append(instance)
+
existing_subset_names.append(subset)
publishable_layers.append(layer)
- created_instances.append(instance)
- if self.create_flatten_image and publishable_layers:
+ if self.create_flatten_image != "no" and publishable_layers:
self.log.debug("create_flatten_image")
if not self.flatten_subset_template:
self.log.warning("No template for flatten image")
@@ -116,7 +130,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin):
first_layer = publishable_layers[0] # dummy layer
first_layer.name = subset
- family = created_instances[0].data["family"] # inherit family
+ family = family_from_settings # inherit family
instance = self._create_instance(context, first_layer,
family,
asset_name, subset, task_name)
diff --git a/openpype/hosts/tvpaint/worker/worker_job.py b/openpype/hosts/tvpaint/worker/worker_job.py
index 1c785ab2ee..95c0a678bc 100644
--- a/openpype/hosts/tvpaint/worker/worker_job.py
+++ b/openpype/hosts/tvpaint/worker/worker_job.py
@@ -9,7 +9,7 @@ from abc import ABCMeta, abstractmethod, abstractproperty
import six
-from openpype.api import PypeLogger
+from openpype.lib import Logger
from openpype.modules import ModulesManager
@@ -328,7 +328,7 @@ class TVPaintCommands:
def log(self):
"""Access to logger object."""
if self._log is None:
- self._log = PypeLogger.get_logger(self.__class__.__name__)
+ self._log = Logger.get_logger(self.__class__.__name__)
return self._log
@property
diff --git a/openpype/hosts/webpublisher/__init__.py b/openpype/hosts/webpublisher/__init__.py
index e69de29bb2..4e918c5d7d 100644
--- a/openpype/hosts/webpublisher/__init__.py
+++ b/openpype/hosts/webpublisher/__init__.py
@@ -0,0 +1,10 @@
+from .addon import (
+ WebpublisherAddon,
+ WEBPUBLISHER_ROOT_DIR,
+)
+
+
+__all__ = (
+ "WebpublisherAddon",
+ "WEBPUBLISHER_ROOT_DIR",
+)
diff --git a/openpype/hosts/webpublisher/addon.py b/openpype/hosts/webpublisher/addon.py
new file mode 100644
index 0000000000..7d26d5a7ff
--- /dev/null
+++ b/openpype/hosts/webpublisher/addon.py
@@ -0,0 +1,106 @@
+import os
+
+import click
+
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class WebpublisherAddon(OpenPypeModule, IHostModule):
+ name = "webpublisher"
+ host_name = "webpublisher"
+
+ def initialize(self, module_settings):
+ self.enabled = True
+
+ def headless_publish(self, log, close_plugin_name=None, is_test=False):
+ """Runs publish in an opened host with a context.
+
+ Close Python process at the end.
+ """
+
+ from openpype.pipeline.publish.lib import remote_publish
+ from .lib import get_webpublish_conn, publish_and_log
+
+ if is_test:
+ remote_publish(log, close_plugin_name)
+ return
+
+ dbcon = get_webpublish_conn()
+ _id = os.environ.get("BATCH_LOG_ID")
+ if not _id:
+ log.warning("Unable to store log records, "
+ "batch will be unfinished!")
+ return
+
+ publish_and_log(
+ dbcon, _id, log, close_plugin_name=close_plugin_name
+ )
+
+ def cli(self, click_group):
+ click_group.add_command(cli_main)
+
+
+@click.group(
+ WebpublisherAddon.name,
+ help="Webpublisher related commands.")
+def cli_main():
+ pass
+
+
+@cli_main.command()
+@click.argument("path")
+@click.option("-u", "--user", help="User email address")
+@click.option("-p", "--project", help="Project")
+@click.option("-t", "--targets", help="Targets", default=None,
+ multiple=True)
+def publish(project, path, user=None, targets=None):
+ """Start publishing (Inner command).
+
+ Publish collects json from paths provided as an argument.
+ More than one path is allowed.
+ """
+
+ from .publish_functions import cli_publish
+
+ cli_publish(project, path, user, targets)
+
+
+@cli_main.command()
+@click.argument("path")
+@click.option("-p", "--project", help="Project")
+@click.option("-h", "--host", help="Host")
+@click.option("-u", "--user", help="User email address")
+@click.option("-t", "--targets", help="Targets", default=None,
+ multiple=True)
+def publishfromapp(project, path, host, user=None, targets=None):
+ """Start publishing through application (Inner command).
+
+ Publish collects json from paths provided as an argument.
+ More than one path is allowed.
+ """
+
+ from .publish_functions import cli_publish_from_app
+
+ cli_publish_from_app(project, path, host, user, targets)
+
+
+@cli_main.command()
+@click.option("-e", "--executable", help="Executable")
+@click.option("-u", "--upload_dir", help="Upload dir")
+@click.option("-h", "--host", help="Host", default=None)
+@click.option("-p", "--port", help="Port", default=None)
+def webserver(executable, upload_dir, host=None, port=None):
+ """Start service for communication with Webpublish Front end.
+
+ OP must be configured on a machine, e.g. OPENPYPE_MONGO filled AND
+ FTRACK_BOT_API_KEY provided with api key from Ftrack.
+
+ Expect "pype.club" user created on Ftrack.
+ """
+
+ from .webserver_service import run_webserver
+
+ run_webserver(executable, upload_dir, host, port)
diff --git a/openpype/hosts/webpublisher/api/__init__.py b/openpype/hosts/webpublisher/api/__init__.py
index 18e3a16cf5..afea838e2c 100644
--- a/openpype/hosts/webpublisher/api/__init__.py
+++ b/openpype/hosts/webpublisher/api/__init__.py
@@ -1,31 +1,23 @@
import os
import logging
-from pyblish import api as pyblish
-import openpype.hosts.webpublisher
-from openpype.pipeline import legacy_io
+import pyblish.api
+
+from openpype.host import HostBase
+from openpype.hosts.webpublisher import WEBPUBLISHER_ROOT_DIR
log = logging.getLogger("openpype.hosts.webpublisher")
-HOST_DIR = os.path.dirname(os.path.abspath(
- openpype.hosts.webpublisher.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+class WebpublisherHost(HostBase):
+ name = "webpublisher"
-def install():
- print("Installing Pype config...")
+ def install(self):
+ print("Installing Pype config...")
+ pyblish.api.register_host(self.name)
- pyblish.register_plugin_path(PUBLISH_PATH)
- log.info(PUBLISH_PATH)
-
- legacy_io.install()
-
-
-def uninstall():
- pyblish.deregister_plugin_path(PUBLISH_PATH)
-
-
-# to have required methods for interface
-def ls():
- pass
+ publish_plugin_dir = os.path.join(
+ WEBPUBLISHER_ROOT_DIR, "plugins", "publish"
+ )
+ pyblish.api.register_plugin_path(publish_plugin_dir)
+ self.log.info(publish_plugin_dir)
diff --git a/openpype/lib/remote_publish.py b/openpype/hosts/webpublisher/lib.py
similarity index 75%
rename from openpype/lib/remote_publish.py
rename to openpype/hosts/webpublisher/lib.py
index b4b05c053b..4bc3f1db80 100644
--- a/openpype/lib/remote_publish.py
+++ b/openpype/hosts/webpublisher/lib.py
@@ -1,6 +1,7 @@
import os
from datetime import datetime
import collections
+import json
from bson.objectid import ObjectId
@@ -8,9 +9,10 @@ import pyblish.util
import pyblish.api
from openpype.client.mongo import OpenPypeMongoConnection
-from openpype.lib.plugin_tools import parse_json
+from openpype.settings import get_project_settings
+from openpype.lib import Logger
from openpype.lib.profiles_filtering import filter_profiles
-from openpype.api import get_project_settings
+from openpype.pipeline.publish.lib import find_close_plugin
ERROR_STATUS = "error"
IN_PROGRESS_STATUS = "in_progress"
@@ -19,21 +21,51 @@ SENT_REPROCESSING_STATUS = "sent_for_reprocessing"
FINISHED_REPROCESS_STATUS = "republishing_finished"
FINISHED_OK_STATUS = "finished_ok"
+log = Logger.get_logger(__name__)
-def headless_publish(log, close_plugin_name=None, is_test=False):
- """Runs publish in a opened host with a context and closes Python process.
+
+def parse_json(path):
+ """Parses json file at 'path' location
+
+ Returns:
+ (dict) or None if unparsable
+ Raises:
+ AssertionError if 'path' doesn't exist
"""
- if not is_test:
- dbcon = get_webpublish_conn()
- _id = os.environ.get("BATCH_LOG_ID")
- if not _id:
- log.warning("Unable to store log records, "
- "batch will be unfinished!")
- return
+ path = path.strip('\"')
+ assert os.path.isfile(path), (
+ "Path to json file doesn't exist. \"{}\"".format(path)
+ )
+ data = None
+ with open(path, "r") as json_file:
+ try:
+ data = json.load(json_file)
+ except Exception as exc:
+ log.error(
+ "Error loading json: {} - Exception: {}".format(path, exc)
+ )
+ return data
- publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name)
+
+def get_batch_asset_task_info(ctx):
+ """Parses context data from webpublisher's batch metadata
+
+ Returns:
+ (tuple): asset, task_name (Optional), task_type
+ """
+ task_type = "default_task_type"
+ task_name = None
+ asset = None
+
+ if ctx["type"] == "task":
+ items = ctx["path"].split('/')
+ asset = items[-2]
+ task_name = ctx["name"]
+ task_type = ctx["attributes"]["type"]
else:
- publish(log, close_plugin_name)
+ asset = ctx["name"]
+
+ return asset, task_name, task_type
def get_webpublish_conn():
@@ -62,43 +94,13 @@ def start_webpublish_log(dbcon, batch_id, user):
}).inserted_id
-def publish(log, close_plugin_name=None, raise_error=False):
- """Loops through all plugins, logs to console. Used for tests.
-
- Args:
- log (OpenPypeLogger)
- close_plugin_name (str): name of plugin with responsibility to
- close host app
- """
- # Error exit as soon as any error occurs.
- error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
-
- close_plugin = _get_close_plugin(close_plugin_name, log)
-
- for result in pyblish.util.publish_iter():
- for record in result["records"]:
- log.info("{}: {}".format(
- result["plugin"].label, record.msg))
-
- if result["error"]:
- error_message = error_format.format(**result)
- log.error(error_message)
- if close_plugin: # close host app explicitly after error
- context = pyblish.api.Context()
- close_plugin().process(context)
- if raise_error:
- # Fatal Error is because of Deadline
- error_message = "Fatal Error: " + error_format.format(**result)
- raise RuntimeError(error_message)
-
-
def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
"""Loops through all plugins, logs ok and fails into OP DB.
Args:
dbcon (OpenPypeMongoConnection)
_id (str) - id of current job in DB
- log (OpenPypeLogger)
+ log (openpype.lib.Logger)
batch_id (str) - id sent from frontend
close_plugin_name (str): name of plugin with responsibility to
close host app
@@ -107,7 +109,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n"
error_format += "-" * 80 + "\n"
- close_plugin = _get_close_plugin(close_plugin_name, log)
+ close_plugin = find_close_plugin(close_plugin_name, log)
if isinstance(_id, str):
_id = ObjectId(_id)
@@ -226,16 +228,6 @@ def find_variant_key(application_manager, host):
return found_variant_key
-def _get_close_plugin(close_plugin_name, log):
- if close_plugin_name:
- plugins = pyblish.api.discover()
- for plugin in plugins:
- if plugin.__name__ == close_plugin_name:
- return plugin
-
- log.debug("Close plugin not found, app might not close.")
-
-
def get_task_data(batch_dir):
"""Return parsed data from first task manifest.json
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
index 9ff779636a..eb2737b276 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_batch_data.py
@@ -13,12 +13,13 @@ import os
import pyblish.api
-from openpype.lib.plugin_tools import (
- parse_json,
- get_batch_asset_task_info
-)
-from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS
from openpype.pipeline import legacy_io
+from openpype_modules.webpublisher.lib import (
+ parse_json,
+ get_batch_asset_task_info,
+ get_webpublish_conn,
+ IN_PROGRESS_STATUS
+)
class CollectBatchData(pyblish.api.ContextPlugin):
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
index 20e277d794..454f78ce9d 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py
@@ -23,10 +23,8 @@ from openpype.lib import (
get_ffprobe_streams,
convert_ffprobe_fps_value,
)
-from openpype.lib.plugin_tools import (
- parse_json,
- get_subset_name_with_asset_doc
-)
+from openpype.lib.plugin_tools import get_subset_name_with_asset_doc
+from openpype_modules.webpublisher.lib import parse_json
class CollectPublishedFiles(pyblish.api.ContextPlugin):
diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
index f0f29260a2..b5f8ed9c8f 100644
--- a/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
+++ b/openpype/hosts/webpublisher/plugins/publish/collect_tvpaint_workfile_data.py
@@ -16,11 +16,11 @@ import uuid
import json
import shutil
import pyblish.api
-from openpype.lib.plugin_tools import parse_json
from openpype.hosts.tvpaint.worker import (
SenderTVPaintCommands,
CollectSceneData
)
+from openpype_modules.webpublisher.lib import parse_json
class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin):
diff --git a/openpype/hosts/webpublisher/publish_functions.py b/openpype/hosts/webpublisher/publish_functions.py
new file mode 100644
index 0000000000..83f53ced68
--- /dev/null
+++ b/openpype/hosts/webpublisher/publish_functions.py
@@ -0,0 +1,205 @@
+import os
+import time
+import pyblish.api
+import pyblish.util
+
+from openpype.lib import Logger
+from openpype.lib.applications import (
+ ApplicationManager,
+ get_app_environments_for_context,
+)
+from openpype.pipeline import install_host
+from openpype.hosts.webpublisher.api import WebpublisherHost
+
+from .lib import (
+ get_batch_asset_task_info,
+ get_webpublish_conn,
+ start_webpublish_log,
+ publish_and_log,
+ fail_batch,
+ find_variant_key,
+ get_task_data,
+ get_timeout,
+ IN_PROGRESS_STATUS
+)
+
+
+def cli_publish(project_name, batch_path, user_email, targets):
+ """Start headless publishing.
+
+ Used to publish rendered assets, workfiles etc via Webpublisher.
+ Eventually should be yanked out to Webpublisher cli.
+
+ Publish use json from passed paths argument.
+
+ Args:
+ project_name (str): project to publish (only single context is
+ expected per call of remotepublish)
+ batch_path (str): Path batch folder. Contains subfolders with
+ resources (workfile, another subfolder 'renders' etc.)
+ user_email (string): email address for webpublisher - used to
+ find Ftrack user with same email
+ targets (list): Pyblish targets
+ (to choose validator for example)
+
+ Raises:
+ RuntimeError: When there is no path to process.
+ """
+
+ if not batch_path:
+ raise RuntimeError("No publish paths specified")
+
+ log = Logger.get_logger("remotepublish")
+ log.info("remotepublish command")
+
+ # Register target and host
+ webpublisher_host = WebpublisherHost()
+
+ os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
+ os.environ["AVALON_PROJECT"] = project_name
+ os.environ["AVALON_APP"] = webpublisher_host.name
+ os.environ["USER_EMAIL"] = user_email
+ os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
+
+ if targets:
+ if isinstance(targets, str):
+ targets = [targets]
+ for target in targets:
+ pyblish.api.register_target(target)
+
+ install_host(webpublisher_host)
+
+ log.info("Running publish ...")
+
+ _, batch_id = os.path.split(batch_path)
+ dbcon = get_webpublish_conn()
+ _id = start_webpublish_log(dbcon, batch_id, user_email)
+
+ task_data = get_task_data(batch_path)
+ if not task_data["context"]:
+ msg = "Batch manifest must contain context data"
+ msg += "Create new batch and set context properly."
+ fail_batch(_id, dbcon, msg)
+
+ publish_and_log(dbcon, _id, log, batch_id=batch_id)
+
+ log.info("Publish finished.")
+
+
+def cli_publish_from_app(
+ project_name, batch_path, host_name, user_email, targets
+):
+ """Opens installed variant of 'host' and runs remote publish there.
+
+ Eventually should be yanked out to Webpublisher cli.
+
+ Currently implemented and tested for Photoshop where customer
+ wants to process uploaded .psd file and publish collected layers
+ from there. Triggered by Webpublisher.
+
+ Checks if no other batches are running (status == 'in_progress'). If
+ so, it sleeps for SLEEP (this is separate process),
+ waits for WAIT_FOR seconds altogether.
+
+ Requires installed host application on the machine.
+
+ Runs publish process as user would, in automatic fashion.
+
+ Args:
+ project_name (str): project to publish (only single context is
+ expected per call of remotepublish)
+ batch_path (str): Path batch folder. Contains subfolders with
+ resources (workfile, another subfolder 'renders' etc.)
+ host_name (str): 'photoshop'
+ user_email (string): email address for webpublisher - used to
+ find Ftrack user with same email
+ targets (list): Pyblish targets
+ (to choose validator for example)
+ """
+
+ log = Logger.get_logger("RemotePublishFromApp")
+
+ log.info("remotepublishphotoshop command")
+
+ task_data = get_task_data(batch_path)
+
+ workfile_path = os.path.join(batch_path,
+ task_data["task"],
+ task_data["files"][0])
+
+ print("workfile_path {}".format(workfile_path))
+
+ batch_id = task_data["batch"]
+ dbcon = get_webpublish_conn()
+ # safer to start logging here, launch might be broken altogether
+ _id = start_webpublish_log(dbcon, batch_id, user_email)
+
+ batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS}))
+ if len(batches_in_progress) > 1:
+ running_batches = [str(batch["_id"])
+ for batch in batches_in_progress
+ if batch["_id"] != _id]
+ msg = "There are still running batches {}\n". \
+ format("\n".join(running_batches))
+ msg += "Ask admin to check them and reprocess current batch"
+ fail_batch(_id, dbcon, msg)
+
+ if not task_data["context"]:
+ msg = "Batch manifest must contain context data"
+ msg += "Create new batch and set context properly."
+ fail_batch(_id, dbcon, msg)
+
+ asset_name, task_name, task_type = get_batch_asset_task_info(
+ task_data["context"])
+
+ application_manager = ApplicationManager()
+ found_variant_key = find_variant_key(application_manager, host_name)
+ app_name = "{}/{}".format(host_name, found_variant_key)
+
+ # must have for proper launch of app
+ env = get_app_environments_for_context(
+ project_name,
+ asset_name,
+ task_name,
+ app_name
+ )
+ print("env:: {}".format(env))
+ os.environ.update(env)
+
+ os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
+ # must pass identifier to update log lines for a batch
+ os.environ["BATCH_LOG_ID"] = str(_id)
+ os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
+ os.environ["USER_EMAIL"] = user_email
+
+ pyblish.api.register_host(host_name)
+ if targets:
+ if isinstance(targets, str):
+ targets = [targets]
+ current_targets = os.environ.get("PYBLISH_TARGETS", "").split(
+ os.pathsep)
+ for target in targets:
+ current_targets.append(target)
+
+ os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
+ set(current_targets))
+
+ data = {
+ "last_workfile_path": workfile_path,
+ "start_last_workfile": True,
+ "project_name": project_name,
+ "asset_name": asset_name,
+ "task_name": task_name
+ }
+
+ launched_app = application_manager.launch(app_name, **data)
+
+ timeout = get_timeout(project_name, host_name, task_type)
+
+ time_start = time.time()
+ while launched_app.poll() is None:
+ time.sleep(0.5)
+ if time.time() - time_start > timeout:
+ launched_app.terminate()
+ msg = "Timeout reached"
+ fail_batch(_id, dbcon, msg)
diff --git a/openpype/hosts/webpublisher/webserver_service/__init__.py b/openpype/hosts/webpublisher/webserver_service/__init__.py
new file mode 100644
index 0000000000..73111d286e
--- /dev/null
+++ b/openpype/hosts/webpublisher/webserver_service/__init__.py
@@ -0,0 +1,6 @@
+from .webserver import run_webserver
+
+
+__all__ = (
+ "run_webserver",
+)
diff --git a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
index 6444a5191d..4039d2c8ec 100644
--- a/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
+++ b/openpype/hosts/webpublisher/webserver_service/webpublish_routes.py
@@ -10,20 +10,19 @@ from aiohttp.web_response import Response
from openpype.client import (
get_projects,
get_assets,
- OpenPypeMongoConnection,
)
-from openpype.lib import (
- PypeLogger,
-)
-from openpype.lib.remote_publish import (
+from openpype.lib import Logger
+from openpype.settings import get_project_settings
+from openpype_modules.webserver.base_routes import RestApiEndpoint
+from openpype_modules.webpublisher import WebpublisherAddon
+from openpype_modules.webpublisher.lib import (
+ get_webpublish_conn,
get_task_data,
ERROR_STATUS,
REPROCESS_STATUS
)
-from openpype.settings import get_project_settings
-from openpype_modules.webserver.base_routes import RestApiEndpoint
-log = PypeLogger.get_logger("WebpublishRoutes")
+log = Logger.get_logger("WebpublishRoutes")
class ResourceRestApiEndpoint(RestApiEndpoint):
@@ -79,9 +78,7 @@ class WebpublishRestApiResource(JsonApiResource):
"""Resource carrying OP DB connection for storing batch info into DB."""
def __init__(self):
- mongo_client = OpenPypeMongoConnection.get_mongo_client()
- database_name = os.environ["OPENPYPE_DATABASE_NAME"]
- self.dbcon = mongo_client[database_name]["webpublishes"]
+ self.dbcon = get_webpublish_conn()
class ProjectsEndpoint(ResourceRestApiEndpoint):
@@ -217,7 +214,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
# TVPaint filter
{
"extensions": [".tvpp"],
- "command": "remotepublish",
+ "command": "publish",
"arguments": {
"targets": ["tvpaint_worker"]
},
@@ -226,13 +223,13 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
# Photoshop filter
{
"extensions": [".psd", ".psb"],
- "command": "remotepublishfromapp",
+ "command": "publishfromapp",
"arguments": {
- # Command 'remotepublishfromapp' requires --host argument
+ # Command 'publishfromapp' requires --host argument
"host": "photoshop",
# Make sure targets are set to None for cases that default
# would change
- # - targets argument is not used in 'remotepublishfromapp'
+ # - targets argument is not used in 'publishfromapp'
"targets": ["remotepublish"]
},
# does publish need to be handled by a queue, eg. only
@@ -244,7 +241,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
batch_dir = os.path.join(self.resource.upload_dir, content["batch"])
# Default command and arguments
- command = "remotepublish"
+ command = "publish"
add_args = {
# All commands need 'project' and 'user'
"project": content["project_name"],
@@ -275,6 +272,8 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
args = [
openpype_app,
+ "module",
+ WebpublisherAddon.name,
command,
batch_dir
]
diff --git a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py b/openpype/hosts/webpublisher/webserver_service/webserver.py
similarity index 91%
rename from openpype/hosts/webpublisher/webserver_service/webserver_cli.py
rename to openpype/hosts/webpublisher/webserver_service/webserver.py
index 6620e5d5cf..093b53d9d3 100644
--- a/openpype/hosts/webpublisher/webserver_service/webserver_cli.py
+++ b/openpype/hosts/webpublisher/webserver_service/webserver.py
@@ -7,7 +7,14 @@ import json
import subprocess
from openpype.client import OpenPypeMongoConnection
-from openpype.lib import PypeLogger
+from openpype.modules import ModulesManager
+from openpype.lib import Logger
+
+from openpype_modules.webpublisher.lib import (
+ ERROR_STATUS,
+ REPROCESS_STATUS,
+ SENT_REPROCESSING_STATUS
+)
from .webpublish_routes import (
RestApiResource,
@@ -21,32 +28,29 @@ from .webpublish_routes import (
TaskPublishEndpoint,
UserReportEndpoint
)
-from openpype.lib.remote_publish import (
- ERROR_STATUS,
- REPROCESS_STATUS,
- SENT_REPROCESSING_STATUS
-)
+
+log = Logger.get_logger("webserver_gui")
-log = PypeLogger.get_logger("webserver_gui")
-
-
-def run_webserver(*args, **kwargs):
+def run_webserver(executable, upload_dir, host=None, port=None):
"""Runs webserver in command line, adds routes."""
- from openpype.modules import ModulesManager
+
+ if not host:
+ host = "localhost"
+ if not port:
+ port = 8079
manager = ModulesManager()
webserver_module = manager.modules_by_name["webserver"]
- host = kwargs.get("host") or "localhost"
- port = kwargs.get("port") or 8079
+
server_manager = webserver_module.create_new_server_manager(port, host)
webserver_url = server_manager.url
# queue for remotepublishfromapp tasks
studio_task_queue = collections.deque()
resource = RestApiResource(server_manager,
- upload_dir=kwargs["upload_dir"],
- executable=kwargs["executable"],
+ upload_dir=upload_dir,
+ executable=executable,
studio_task_queue=studio_task_queue)
projects_endpoint = ProjectsEndpoint(resource)
server_manager.add_route(
@@ -111,7 +115,7 @@ def run_webserver(*args, **kwargs):
last_reprocessed = time.time()
while True:
if time.time() - last_reprocessed > 20:
- reprocess_failed(kwargs["upload_dir"], webserver_url)
+ reprocess_failed(upload_dir, webserver_url)
last_reprocessed = time.time()
if studio_task_queue:
args = studio_task_queue.popleft()
diff --git a/openpype/lib/applications.py b/openpype/lib/applications.py
index 074e815160..eaa4c1a0a8 100644
--- a/openpype/lib/applications.py
+++ b/openpype/lib/applications.py
@@ -24,7 +24,7 @@ from openpype.settings.constants import (
METADATA_KEYS,
M_DYNAMIC_KEY_LABEL
)
-from . import PypeLogger
+from .log import Logger
from .profiles_filtering import filter_profiles
from .local_settings import get_openpype_username
@@ -138,7 +138,7 @@ def get_logger():
"""Global lib.applications logger getter."""
global _logger
if _logger is None:
- _logger = PypeLogger.get_logger(__name__)
+ _logger = Logger.get_logger(__name__)
return _logger
@@ -373,7 +373,7 @@ class ApplicationManager:
"""
def __init__(self, system_settings=None):
- self.log = PypeLogger.get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
self.app_groups = {}
self.applications = {}
@@ -735,7 +735,7 @@ class LaunchHook:
Always should be called
"""
- self.log = PypeLogger().get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
self.launch_context = launch_context
@@ -877,7 +877,7 @@ class ApplicationLaunchContext:
# Logger
logger_name = "{}-{}".format(self.__class__.__name__, self.app_name)
- self.log = PypeLogger.get_logger(logger_name)
+ self.log = Logger.get_logger(logger_name)
self.executable = executable
@@ -950,6 +950,63 @@ class ApplicationLaunchContext:
)
self.kwargs["env"] = value
+ def _collect_addons_launch_hook_paths(self):
+ """Helper to collect application launch hooks from addons.
+
+ Modules have to have implemented 'get_launch_hook_paths' method which
+ can expect application as argument or nothing.
+
+ Returns:
+ List[str]: Paths to launch hook directories.
+ """
+
+ expected_types = (list, tuple, set)
+
+ output = []
+ for module in self.modules_manager.get_enabled_modules():
+ # Skip module if does not have implemented 'get_launch_hook_paths'
+ func = getattr(module, "get_launch_hook_paths", None)
+ if func is None:
+ continue
+
+ func = module.get_launch_hook_paths
+ if hasattr(inspect, "signature"):
+ sig = inspect.signature(func)
+ expect_args = len(sig.parameters) > 0
+ else:
+ expect_args = len(inspect.getargspec(func)[0]) > 0
+
+ # Pass application argument if method expect it.
+ try:
+ if expect_args:
+ hook_paths = func(self.application)
+ else:
+ hook_paths = func()
+ except Exception:
+ self.log.warning(
+ "Failed to call 'get_launch_hook_paths'",
+ exc_info=True
+ )
+ continue
+
+ if not hook_paths:
+ continue
+
+ # Convert string to list
+ if isinstance(hook_paths, six.string_types):
+ hook_paths = [hook_paths]
+
+ # Skip invalid types
+ if not isinstance(hook_paths, expected_types):
+ self.log.warning((
+ "Result of `get_launch_hook_paths`"
+ " has invalid type {}. Expected {}"
+ ).format(type(hook_paths), expected_types))
+ continue
+
+ output.extend(hook_paths)
+ return output
+
def paths_to_launch_hooks(self):
"""Directory paths where to look for launch hooks."""
# This method has potential to be part of application manager (maybe).
@@ -983,9 +1040,7 @@ class ApplicationLaunchContext:
paths.append(path)
# Load modules paths
- paths.extend(
- self.modules_manager.collect_launch_hook_paths(self.application)
- )
+ paths.extend(self._collect_addons_launch_hook_paths())
return paths
diff --git a/openpype/lib/execute.py b/openpype/lib/execute.py
index c3e35772f3..f1f2a4fa0a 100644
--- a/openpype/lib/execute.py
+++ b/openpype/lib/execute.py
@@ -5,7 +5,7 @@ import platform
import json
import tempfile
-from .log import PypeLogger as Logger
+from .log import Logger
from .vendor_bin_utils import find_executable
# MSDN process creation flag (Windows only)
@@ -40,7 +40,7 @@ def execute(args,
log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:']
- log = Logger().get_logger('execute')
+ log = Logger.get_logger('execute')
log.info("Executing ({})".format(" ".join(args)))
popen = subprocess.Popen(
args,
diff --git a/openpype/lib/log.py b/openpype/lib/log.py
index e77edea0e9..26dcd86eec 100644
--- a/openpype/lib/log.py
+++ b/openpype/lib/log.py
@@ -486,12 +486,18 @@ class Logger:
class PypeLogger(Logger):
+ """Duplicate of 'Logger'.
+
+ Deprecated:
+ Class will be removed after release version 3.16.*
+ """
+
@classmethod
def get_logger(cls, *args, **kwargs):
logger = Logger.get_logger(*args, **kwargs)
# TODO uncomment when replaced most of places
- # logger.warning((
- # "'openpype.lib.PypeLogger' is deprecated class."
- # " Please use 'openpype.lib.Logger' instead."
- # ))
+ logger.warning((
+ "'openpype.lib.PypeLogger' is deprecated class."
+ " Please use 'openpype.lib.Logger' instead."
+ ))
return logger
diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py
index e4b18ec258..b160054e38 100644
--- a/openpype/lib/path_templates.py
+++ b/openpype/lib/path_templates.py
@@ -6,11 +6,6 @@ import collections
import six
-from .log import PypeLogger
-
-log = PypeLogger.get_logger(__name__)
-
-
KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})")
KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+")
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
diff --git a/openpype/lib/plugin_tools.py b/openpype/lib/plugin_tools.py
index 9080918dfa..a28d138dc7 100644
--- a/openpype/lib/plugin_tools.py
+++ b/openpype/lib/plugin_tools.py
@@ -373,48 +373,3 @@ def source_hash(filepath, *args):
time = str(os.path.getmtime(filepath))
size = str(os.path.getsize(filepath))
return "|".join([file_name, time, size] + list(args)).replace(".", ",")
-
-
-def parse_json(path):
- """Parses json file at 'path' location
-
- Returns:
- (dict) or None if unparsable
- Raises:
- AsssertionError if 'path' doesn't exist
- """
- path = path.strip('\"')
- assert os.path.isfile(path), (
- "Path to json file doesn't exist. \"{}\"".format(path)
- )
- data = None
- with open(path, "r") as json_file:
- try:
- data = json.load(json_file)
- except Exception as exc:
- log.error(
- "Error loading json: "
- "{} - Exception: {}".format(path, exc)
- )
- return data
-
-
-def get_batch_asset_task_info(ctx):
- """Parses context data from webpublisher's batch metadata
-
- Returns:
- (tuple): asset, task_name (Optional), task_type
- """
- task_type = "default_task_type"
- task_name = None
- asset = None
-
- if ctx["type"] == "task":
- items = ctx["path"].split('/')
- asset = items[-2]
- task_name = ctx["name"]
- task_type = ctx["attributes"]["type"]
- else:
- asset = ctx["name"]
-
- return asset, task_name, task_type
diff --git a/openpype/modules/__init__.py b/openpype/modules/__init__.py
index 68b5f6c247..02e7dc13ab 100644
--- a/openpype/modules/__init__.py
+++ b/openpype/modules/__init__.py
@@ -2,7 +2,6 @@
from .base import (
OpenPypeModule,
OpenPypeAddOn,
- OpenPypeInterface,
load_modules,
@@ -20,7 +19,6 @@ from .base import (
__all__ = (
"OpenPypeModule",
"OpenPypeAddOn",
- "OpenPypeInterface",
"load_modules",
diff --git a/openpype/modules/base.py b/openpype/modules/base.py
index 1316d7f734..6db6ee9524 100644
--- a/openpype/modules/base.py
+++ b/openpype/modules/base.py
@@ -13,7 +13,6 @@ from uuid import uuid4
from abc import ABCMeta, abstractmethod
import six
-import openpype
from openpype.settings import (
get_system_settings,
SYSTEM_SETTINGS_KEY,
@@ -26,7 +25,20 @@ from openpype.settings.lib import (
get_studio_system_settings_overrides,
load_json_file
)
-from openpype.lib import PypeLogger
+
+from openpype.lib import (
+ Logger,
+ import_filepath,
+ import_module_from_dirpath
+)
+
+from .interfaces import (
+ OpenPypeInterface,
+ IPluginPaths,
+ IHostModule,
+ ITrayModule,
+ ITrayService
+)
# Files that will be always ignored on modules import
IGNORED_FILENAMES = (
@@ -93,7 +105,7 @@ class _ModuleClass(object):
def log(self):
if self._log is None:
super(_ModuleClass, self).__setattr__(
- "_log", PypeLogger.get_logger(self.name)
+ "_log", Logger.get_logger(self.name)
)
return self._log
@@ -278,19 +290,13 @@ def load_modules(force=False):
def _load_modules():
- # Import helper functions from lib
- from openpype.lib import (
- import_filepath,
- import_module_from_dirpath
- )
-
# Key under which will be modules imported in `sys.modules`
modules_key = "openpype_modules"
# Change `sys.modules`
sys.modules[modules_key] = openpype_modules = _ModuleClass(modules_key)
- log = PypeLogger.get_logger("ModulesLoader")
+ log = Logger.get_logger("ModulesLoader")
# Look for OpenPype modules in paths defined with `get_module_dirs`
# - dynamically imported OpenPype modules and addons
@@ -391,29 +397,7 @@ def _load_modules():
log.error(msg, exc_info=True)
-class _OpenPypeInterfaceMeta(ABCMeta):
- """OpenPypeInterface meta class to print proper string."""
- def __str__(self):
- return "<'OpenPypeInterface.{}'>".format(self.__name__)
-
- def __repr__(self):
- return str(self)
-
-
-@six.add_metaclass(_OpenPypeInterfaceMeta)
-class OpenPypeInterface:
- """Base class of Interface that can be used as Mixin with abstract parts.
-
- This is way how OpenPype module or addon can tell that has implementation
- for specific part or for other module/addon.
-
- Child classes of OpenPypeInterface may be used as mixin in different
- OpenPype modules which means they have to have implemented methods defined
- in the interface. By default interface does not have any abstract parts.
- """
-
- pass
@six.add_metaclass(ABCMeta)
@@ -440,7 +424,7 @@ class OpenPypeModule:
def __init__(self, manager, settings):
self.manager = manager
- self.log = PypeLogger.get_logger(self.name)
+ self.log = Logger.get_logger(self.name)
self.initialize(settings)
@@ -749,8 +733,6 @@ class ModulesManager:
and "actions" each containing list of paths.
"""
# Output structure
- from openpype_interfaces import IPluginPaths
-
output = {
"publish": [],
"create": [],
@@ -807,8 +789,6 @@ class ModulesManager:
list: List of creator plugin paths.
"""
# Output structure
- from openpype_interfaces import IPluginPaths
-
output = []
for module in self.get_enabled_modules():
# Skip module that do not inherit from `IPluginPaths`
@@ -823,68 +803,6 @@ class ModulesManager:
output.extend(paths)
return output
- def collect_launch_hook_paths(self, app):
- """Helper to collect application launch hooks.
-
- It used to be based on 'ILaunchHookPaths' which is not true anymore.
- Module just have to have implemented 'get_launch_hook_paths' method.
-
- Args:
- app (Application): Application object which can be used for
- filtering of which launch hook paths are returned.
-
- Returns:
- list: Paths to launch hook directories.
- """
-
- str_type = type("")
- expected_types = (list, tuple, set)
-
- output = []
- for module in self.get_enabled_modules():
- # Skip module if does not have implemented 'get_launch_hook_paths'
- func = getattr(module, "get_launch_hook_paths", None)
- if func is None:
- continue
-
- func = module.get_launch_hook_paths
- if hasattr(inspect, "signature"):
- sig = inspect.signature(func)
- expect_args = len(sig.parameters) > 0
- else:
- expect_args = len(inspect.getargspec(func)[0]) > 0
-
- # Pass application argument if method expect it.
- try:
- if expect_args:
- hook_paths = func(app)
- else:
- hook_paths = func()
- except Exception:
- self.log.warning(
- "Failed to call 'get_launch_hook_paths'",
- exc_info=True
- )
- continue
-
- if not hook_paths:
- continue
-
- # Convert string to list
- if isinstance(hook_paths, str_type):
- hook_paths = [hook_paths]
-
- # Skip invalid types
- if not isinstance(hook_paths, expected_types):
- self.log.warning((
- "Result of `get_launch_hook_paths`"
- " has invalid type {}. Expected {}"
- ).format(type(hook_paths), expected_types))
- continue
-
- output.extend(hook_paths)
- return output
-
def get_host_module(self, host_name):
"""Find host module by host name.
@@ -897,8 +815,6 @@ class ModulesManager:
host name set to passed 'host_name'.
"""
- from openpype_interfaces import IHostModule
-
for module in self.get_enabled_modules():
if (
isinstance(module, IHostModule)
@@ -915,8 +831,6 @@ class ModulesManager:
inheriting 'IHostModule'.
"""
- from openpype_interfaces import IHostModule
-
host_names = {
module.host_name
for module in self.get_enabled_modules()
@@ -1059,7 +973,7 @@ class TrayModulesManager(ModulesManager):
)
def __init__(self):
- self.log = PypeLogger.get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
self.modules = []
self.modules_by_id = {}
@@ -1098,8 +1012,6 @@ class TrayModulesManager(ModulesManager):
self.tray_menu(tray_menu)
def get_enabled_tray_modules(self):
- from openpype_interfaces import ITrayModule
-
output = []
for module in self.modules:
if module.enabled and isinstance(module, ITrayModule):
@@ -1175,8 +1087,6 @@ class TrayModulesManager(ModulesManager):
self._report["Tray menu"] = report
def start_modules(self):
- from openpype_interfaces import ITrayService
-
report = {}
time_start = time.time()
prev_start_time = time_start
@@ -1235,7 +1145,7 @@ def get_module_settings_defs():
settings_defs = []
- log = PypeLogger.get_logger("ModuleSettingsLoad")
+ log = Logger.get_logger("ModuleSettingsLoad")
for raw_module in openpype_modules:
for attr_name in dir(raw_module):
diff --git a/openpype/modules/deadline/deadline_module.py b/openpype/modules/deadline/deadline_module.py
index c30db75188..bbd0f74e8a 100644
--- a/openpype/modules/deadline/deadline_module.py
+++ b/openpype/modules/deadline/deadline_module.py
@@ -3,7 +3,7 @@ import requests
import six
import sys
-from openpype.lib import requests_get, PypeLogger
+from openpype.lib import requests_get, Logger
from openpype.modules import OpenPypeModule
from openpype_interfaces import IPluginPaths
@@ -58,7 +58,7 @@ class DeadlineModule(OpenPypeModule, IPluginPaths):
"""
if not log:
- log = PypeLogger.get_logger(__name__)
+ log = Logger.get_logger(__name__)
argument = "{}/api/pools?NamesOnly=true".format(webservice)
try:
diff --git a/openpype/modules/ftrack/ftrack_module.py b/openpype/modules/ftrack/ftrack_module.py
index f99e189082..cb4f204523 100644
--- a/openpype/modules/ftrack/ftrack_module.py
+++ b/openpype/modules/ftrack/ftrack_module.py
@@ -9,7 +9,6 @@ from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITrayModule,
IPluginPaths,
- ILaunchHookPaths,
ISettingsChangeListener
)
from openpype.settings import SaveWarningExc
@@ -21,7 +20,6 @@ class FtrackModule(
OpenPypeModule,
ITrayModule,
IPluginPaths,
- ILaunchHookPaths,
ISettingsChangeListener
):
name = "ftrack"
@@ -85,7 +83,8 @@ class FtrackModule(
}
def get_launch_hook_paths(self):
- """Implementation of `ILaunchHookPaths`."""
+ """Implementation for application launch hooks."""
+
return os.path.join(FTRACK_MODULE_DIR, "launch_hooks")
def modify_application_launch_arguments(self, application, env):
diff --git a/openpype/modules/ftrack/ftrack_server/ftrack_server.py b/openpype/modules/ftrack/ftrack_server/ftrack_server.py
index 8944591b71..c75b8f7172 100644
--- a/openpype/modules/ftrack/ftrack_server/ftrack_server.py
+++ b/openpype/modules/ftrack/ftrack_server/ftrack_server.py
@@ -7,12 +7,10 @@ import traceback
import ftrack_api
from openpype.lib import (
- PypeLogger,
+ Logger,
modules_from_path
)
-log = PypeLogger.get_logger(__name__)
-
"""
# Required - Needed for connection to Ftrack
FTRACK_SERVER # Ftrack server e.g. "https://myFtrack.ftrackapp.com"
@@ -43,10 +41,13 @@ class FtrackServer:
server.run_server()
..
"""
+
# set Ftrack logging to Warning only - OPTIONAL
ftrack_log = logging.getLogger("ftrack_api")
ftrack_log.setLevel(logging.WARNING)
+ self.log = Logger.get_logger(__name__)
+
self.stopped = True
self.is_running = False
@@ -72,7 +73,7 @@ class FtrackServer:
# Get all modules with functions
modules, crashed = modules_from_path(path)
for filepath, exc_info in crashed:
- log.warning("Filepath load crashed {}.\n{}".format(
+ self.log.warning("Filepath load crashed {}.\n{}".format(
filepath, traceback.format_exception(*exc_info)
))
@@ -87,7 +88,7 @@ class FtrackServer:
break
if not register_function:
- log.warning(
+ self.log.warning(
"\"{}\" - Missing register method".format(filepath)
)
continue
@@ -97,7 +98,7 @@ class FtrackServer:
)
if not register_functions:
- log.warning((
+ self.log.warning((
"There are no events with `register` function"
" in registered paths: \"{}\""
).format("| ".join(paths)))
@@ -106,7 +107,7 @@ class FtrackServer:
try:
register_func(self.session)
except Exception:
- log.warning(
+ self.log.warning(
"\"{}\" - register was not successful".format(filepath),
exc_info=True
)
@@ -141,7 +142,7 @@ class FtrackServer:
self.session = session
if load_files:
if not self.handler_paths:
- log.warning((
+ self.log.warning((
"Paths to event handlers are not set."
" Ftrack server won't launch."
))
@@ -151,8 +152,8 @@ class FtrackServer:
self.set_files(self.handler_paths)
msg = "Registration of event handlers has finished!"
- log.info(len(msg) * "*")
- log.info(msg)
+ self.log.info(len(msg) * "*")
+ self.log.info(msg)
# keep event_hub on session running
self.session.event_hub.wait()
diff --git a/openpype/modules/ftrack/ftrack_server/socket_thread.py b/openpype/modules/ftrack/ftrack_server/socket_thread.py
index f49ca5557e..3ef55f8daa 100644
--- a/openpype/modules/ftrack/ftrack_server/socket_thread.py
+++ b/openpype/modules/ftrack/ftrack_server/socket_thread.py
@@ -5,8 +5,8 @@ import socket
import threading
import traceback
import subprocess
-from openpype.api import Logger
-from openpype.lib import get_openpype_execute_args
+
+from openpype.lib import get_openpype_execute_args, Logger
class SocketThread(threading.Thread):
@@ -16,7 +16,7 @@ class SocketThread(threading.Thread):
def __init__(self, name, port, filepath, additional_args=[]):
super(SocketThread, self).__init__()
- self.log = Logger().get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
self.setName(name)
self.name = name
self.port = port
diff --git a/openpype/modules/ftrack/lib/ftrack_base_handler.py b/openpype/modules/ftrack/lib/ftrack_base_handler.py
index c0fad6aadc..c0b03f8a41 100644
--- a/openpype/modules/ftrack/lib/ftrack_base_handler.py
+++ b/openpype/modules/ftrack/lib/ftrack_base_handler.py
@@ -6,7 +6,7 @@ import uuid
import datetime
import traceback
import time
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.settings import get_project_settings
import ftrack_api
@@ -52,7 +52,7 @@ class BaseHandler(object):
def __init__(self, session):
'''Expects a ftrack_api.Session instance'''
- self.log = Logger().get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
if not(
isinstance(session, ftrack_api.session.Session) or
isinstance(session, ftrack_server.lib.SocketSession)
diff --git a/openpype/modules/ftrack/scripts/sub_event_processor.py b/openpype/modules/ftrack/scripts/sub_event_processor.py
index d1e2e3aaeb..a5ce0511b8 100644
--- a/openpype/modules/ftrack/scripts/sub_event_processor.py
+++ b/openpype/modules/ftrack/scripts/sub_event_processor.py
@@ -4,6 +4,8 @@ import signal
import socket
import datetime
+import ftrack_api
+
from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer
from openpype_modules.ftrack.ftrack_server.lib import (
SocketSession,
@@ -12,17 +14,12 @@ from openpype_modules.ftrack.ftrack_server.lib import (
)
from openpype.modules import ModulesManager
-from openpype.api import Logger
from openpype.lib import (
+ Logger,
get_openpype_version,
get_build_version
)
-
-import ftrack_api
-
-log = Logger().get_logger("Event processor")
-
subprocess_started = datetime.datetime.now()
@@ -68,6 +65,8 @@ def register(session):
def main(args):
+ log = Logger.get_logger("Event processor")
+
port = int(args[-1])
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
diff --git a/openpype/modules/ftrack/scripts/sub_legacy_server.py b/openpype/modules/ftrack/scripts/sub_legacy_server.py
index e3a623c376..1f0fc1b369 100644
--- a/openpype/modules/ftrack/scripts/sub_legacy_server.py
+++ b/openpype/modules/ftrack/scripts/sub_legacy_server.py
@@ -5,11 +5,11 @@ import signal
import threading
import ftrack_api
-from openpype.api import Logger
+from openpype.lib import Logger
from openpype.modules import ModulesManager
from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer
-log = Logger().get_logger("Event Server Legacy")
+log = Logger.get_logger("Event Server Legacy")
class TimerChecker(threading.Thread):
diff --git a/openpype/modules/ftrack/scripts/sub_user_server.py b/openpype/modules/ftrack/scripts/sub_user_server.py
index a3701a0950..930a2d51e2 100644
--- a/openpype/modules/ftrack/scripts/sub_user_server.py
+++ b/openpype/modules/ftrack/scripts/sub_user_server.py
@@ -2,6 +2,7 @@ import sys
import signal
import socket
+from openpype.lib import Logger
from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer
from openpype_modules.ftrack.ftrack_server.lib import (
SocketSession,
@@ -9,9 +10,7 @@ from openpype_modules.ftrack.ftrack_server.lib import (
)
from openpype.modules import ModulesManager
-from openpype.api import Logger
-
-log = Logger().get_logger("FtrackUserServer")
+log = Logger.get_logger("FtrackUserServer")
def main(args):
diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py
index 2919ae22fb..501d837a4c 100644
--- a/openpype/modules/ftrack/tray/ftrack_tray.py
+++ b/openpype/modules/ftrack/tray/ftrack_tray.py
@@ -12,10 +12,11 @@ from ..lib import credentials
from ..ftrack_module import FTRACK_MODULE_DIR
from . import login_dialog
-from openpype.api import Logger, resources
+from openpype import resources
+from openpype.lib import Logger
-log = Logger().get_logger("FtrackModule")
+log = Logger.get_logger("FtrackModule")
class FtrackTrayWrapper:
diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py
index 14f49204ee..13655773dd 100644
--- a/openpype/modules/interfaces.py
+++ b/openpype/modules/interfaces.py
@@ -1,8 +1,33 @@
-from abc import abstractmethod, abstractproperty
+from abc import ABCMeta, abstractmethod, abstractproperty
+
+import six
from openpype import resources
-from openpype.modules import OpenPypeInterface
+
+class _OpenPypeInterfaceMeta(ABCMeta):
+ """OpenPypeInterface meta class to print proper string."""
+
+ def __str__(self):
+ return "<'OpenPypeInterface.{}'>".format(self.__name__)
+
+ def __repr__(self):
+ return str(self)
+
+
+@six.add_metaclass(_OpenPypeInterfaceMeta)
+class OpenPypeInterface:
+ """Base class of Interface that can be used as Mixin with abstract parts.
+
+ This is the way an OpenPype module or addon can tell OpenPype that it
+ contains an implementation for specific functionality.
+
+ Child classes of OpenPypeInterface may be used as mixin in different
+ OpenPype modules which means they have to have implemented methods defined
+ in the interface. By default interface does not have any abstract parts.
+ """
+
+ pass
class IPluginPaths(OpenPypeInterface):
@@ -56,6 +81,13 @@ class ILaunchHookPaths(OpenPypeInterface):
Expected result is list of paths.
["path/to/launch_hooks_dir"]
+
+ Deprecated:
+ This interface is not needed since OpenPype 3.14.*. Addons just have to
+ implement 'get_launch_hook_paths', which can expect an Application object
+ or nothing as an argument.
+
+ Interface class will be removed after 3.16.*.
"""
@abstractmethod
diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py
index aea62c381b..d369ffeb64 100644
--- a/openpype/modules/log_viewer/tray/models.py
+++ b/openpype/modules/log_viewer/tray/models.py
@@ -1,6 +1,6 @@
import collections
from Qt import QtCore, QtGui
-from openpype.lib.log import PypeLogger
+from openpype.lib import Logger
class LogModel(QtGui.QStandardItemModel):
@@ -41,14 +41,14 @@ class LogModel(QtGui.QStandardItemModel):
self.dbcon = None
# Crash if connection is not possible to skip this module
- if not PypeLogger.initialized:
- PypeLogger.initialize()
+ if not Logger.initialized:
+ Logger.initialize()
- connection = PypeLogger.get_log_mongo_connection()
+ connection = Logger.get_log_mongo_connection()
if connection:
- PypeLogger.bootstrap_mongo_log()
- database = connection[PypeLogger.log_database_name]
- self.dbcon = database[PypeLogger.log_collection_name]
+ Logger.bootstrap_mongo_log()
+ database = connection[Logger.log_database_name]
+ self.dbcon = database[Logger.log_collection_name]
def headerData(self, section, orientation, role):
if (
diff --git a/openpype/modules/royalrender/api.py b/openpype/modules/royalrender/api.py
index ed9e71f240..de1dba8724 100644
--- a/openpype/modules/royalrender/api.py
+++ b/openpype/modules/royalrender/api.py
@@ -5,13 +5,10 @@ import os
from openpype.settings import get_project_settings
from openpype.lib.local_settings import OpenPypeSettingsRegistry
-from openpype.lib import PypeLogger, run_subprocess
+from openpype.lib import Logger, run_subprocess
from .rr_job import RRJob, SubmitFile, SubmitterParameter
-log = PypeLogger.get_logger("RoyalRender")
-
-
class Api:
_settings = None
@@ -19,6 +16,7 @@ class Api:
RR_SUBMIT_API = 2
def __init__(self, settings, project=None):
+ self.log = Logger.get_logger("RoyalRender")
self._settings = settings
self._initialize_rr(project)
@@ -137,7 +135,7 @@ class Api:
rr_console += ".exe"
args = [rr_console, file]
- run_subprocess(" ".join(args), logger=log)
+ run_subprocess(" ".join(args), logger=self.log)
def _submit_using_api(self, file):
# type: (SubmitFile) -> None
@@ -159,11 +157,11 @@ class Api:
rr_server = tcp.getRRServer()
if len(rr_server) == 0:
- log.info("Got RR IP address {}".format(rr_server))
+ self.log.info("Got RR IP address {}".format(rr_server))
# TODO: Port is hardcoded in RR? If not, move it to Settings
if not tcp.setServer(rr_server, 7773):
- log.error(
+ self.log.error(
"Can not set RR server: {}".format(tcp.errorMessage()))
raise RoyalRenderException(tcp.errorMessage())
diff --git a/openpype/modules/shotgrid/shotgrid_module.py b/openpype/modules/shotgrid/shotgrid_module.py
index 5644f0c35f..281c6fdcad 100644
--- a/openpype/modules/shotgrid/shotgrid_module.py
+++ b/openpype/modules/shotgrid/shotgrid_module.py
@@ -3,7 +3,6 @@ import os
from openpype_interfaces import (
ITrayModule,
IPluginPaths,
- ILaunchHookPaths,
)
from openpype.modules import OpenPypeModule
@@ -11,9 +10,7 @@ from openpype.modules import OpenPypeModule
SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
-class ShotgridModule(
- OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths
-):
+class ShotgridModule(OpenPypeModule, ITrayModule, IPluginPaths):
leecher_manager_url = None
name = "shotgrid"
enabled = False
diff --git a/openpype/modules/slack/slack_module.py b/openpype/modules/slack/slack_module.py
index 9b2976d766..499c1c19ce 100644
--- a/openpype/modules/slack/slack_module.py
+++ b/openpype/modules/slack/slack_module.py
@@ -1,14 +1,11 @@
import os
from openpype.modules import OpenPypeModule
-from openpype_interfaces import (
- IPluginPaths,
- ILaunchHookPaths
-)
+from openpype.modules.interfaces import IPluginPaths
SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
-class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths):
+class SlackIntegrationModule(OpenPypeModule, IPluginPaths):
"""Allows sending notification to Slack channels during publishing."""
name = "slack"
@@ -18,7 +15,8 @@ class SlackIntegrationModule(OpenPypeModule, IPluginPaths, ILaunchHookPaths):
self.enabled = slack_settings["enabled"]
def get_launch_hook_paths(self):
- """Implementation of `ILaunchHookPaths`."""
+ """Implementation for application launch hooks."""
+
return os.path.join(SLACK_MODULE_DIR, "launch_hooks")
def get_plugin_paths(self):
diff --git a/openpype/modules/sync_server/providers/abstract_provider.py b/openpype/modules/sync_server/providers/abstract_provider.py
index 8c2fe1cad9..e11a8ba71e 100644
--- a/openpype/modules/sync_server/providers/abstract_provider.py
+++ b/openpype/modules/sync_server/providers/abstract_provider.py
@@ -1,8 +1,8 @@
import abc
import six
-from openpype.api import Logger
+from openpype.lib import Logger
-log = Logger().get_logger("SyncServer")
+log = Logger.get_logger("SyncServer")
@six.add_metaclass(abc.ABCMeta)
@@ -10,6 +10,8 @@ class AbstractProvider:
CODE = ''
LABEL = ''
+ _log = None
+
def __init__(self, project_name, site_name, tree=None, presets=None):
self.presets = None
self.active = False
@@ -19,6 +21,12 @@ class AbstractProvider:
super(AbstractProvider, self).__init__()
+ @property
+ def log(self):
+ if self._log is None:
+ self._log = Logger.get_logger(self.__class__.__name__)
+ return self._log
+
@abc.abstractmethod
def is_active(self):
"""
@@ -199,11 +207,11 @@ class AbstractProvider:
path = anatomy.fill_root(path)
except KeyError:
msg = "Error in resolving local root from anatomy"
- log.error(msg)
+ self.log.error(msg)
raise ValueError(msg)
except IndexError:
msg = "Path {} contains unfillable placeholder"
- log.error(msg)
+ self.log.error(msg)
raise ValueError(msg)
return path
diff --git a/openpype/modules/sync_server/providers/dropbox.py b/openpype/modules/sync_server/providers/dropbox.py
index 89d6990841..e026ae7ef6 100644
--- a/openpype/modules/sync_server/providers/dropbox.py
+++ b/openpype/modules/sync_server/providers/dropbox.py
@@ -2,12 +2,9 @@ import os
import dropbox
-from openpype.api import Logger
from .abstract_provider import AbstractProvider
from ..utils import EditableScopes
-log = Logger().get_logger("SyncServer")
-
class DropboxHandler(AbstractProvider):
CODE = 'dropbox'
@@ -20,26 +17,26 @@ class DropboxHandler(AbstractProvider):
self.dbx = None
if not self.presets:
- log.info(
+ self.log.info(
"Sync Server: There are no presets for {}.".format(site_name)
)
return
if not self.presets["enabled"]:
- log.debug("Sync Server: Site {} not enabled for {}.".
+ self.log.debug("Sync Server: Site {} not enabled for {}.".
format(site_name, project_name))
return
token = self.presets.get("token", "")
if not token:
msg = "Sync Server: No access token for dropbox provider"
- log.info(msg)
+ self.log.info(msg)
return
team_folder_name = self.presets.get("team_folder_name", "")
if not team_folder_name:
msg = "Sync Server: No team folder name for dropbox provider"
- log.info(msg)
+ self.log.info(msg)
return
acting_as_member = self.presets.get("acting_as_member", "")
@@ -47,7 +44,7 @@ class DropboxHandler(AbstractProvider):
msg = (
"Sync Server: No acting member for dropbox provider"
)
- log.info(msg)
+ self.log.info(msg)
return
try:
@@ -55,7 +52,7 @@ class DropboxHandler(AbstractProvider):
token, acting_as_member, team_folder_name
)
except Exception as e:
- log.info("Could not establish dropbox object: {}".format(e))
+ self.log.info("Could not establish dropbox object: {}".format(e))
return
super(AbstractProvider, self).__init__()
@@ -448,7 +445,7 @@ class DropboxHandler(AbstractProvider):
path = anatomy.fill_root(path)
except KeyError:
msg = "Error in resolving local root from anatomy"
- log.error(msg)
+ self.log.error(msg)
raise ValueError(msg)
return path
diff --git a/openpype/modules/sync_server/providers/gdrive.py b/openpype/modules/sync_server/providers/gdrive.py
index bef707788b..9a3ce89cf5 100644
--- a/openpype/modules/sync_server/providers/gdrive.py
+++ b/openpype/modules/sync_server/providers/gdrive.py
@@ -5,12 +5,12 @@ import sys
import six
import platform
-from openpype.api import Logger
-from openpype.api import get_system_settings
+from openpype.lib import Logger
+from openpype.settings import get_system_settings
from .abstract_provider import AbstractProvider
from ..utils import time_function, ResumableError
-log = Logger().get_logger("SyncServer")
+log = Logger.get_logger("GDriveHandler")
try:
from googleapiclient.discovery import build
@@ -69,13 +69,17 @@ class GDriveHandler(AbstractProvider):
self.presets = presets
if not self.presets:
- log.info("Sync Server: There are no presets for {}.".
- format(site_name))
+ self.log.info(
+ "Sync Server: There are no presets for {}.".format(site_name)
+ )
return
if not self.presets["enabled"]:
- log.debug("Sync Server: Site {} not enabled for {}.".
- format(site_name, project_name))
+ self.log.debug(
+ "Sync Server: Site {} not enabled for {}.".format(
+ site_name, project_name
+ )
+ )
return
current_platform = platform.system().lower()
@@ -85,20 +89,22 @@ class GDriveHandler(AbstractProvider):
if not cred_path:
msg = "Sync Server: Please, fill the credentials for gdrive "\
"provider for platform '{}' !".format(current_platform)
- log.info(msg)
+ self.log.info(msg)
return
try:
cred_path = cred_path.format(**os.environ)
except KeyError as e:
- log.info("Sync Server: The key(s) {} does not exist in the "
- "environment variables".format(" ".join(e.args)))
+ self.log.info((
+ "Sync Server: The key(s) {} does not exist in the "
+ "environment variables"
+ ).format(" ".join(e.args)))
return
if not os.path.exists(cred_path):
msg = "Sync Server: No credentials for gdrive provider " + \
"for '{}' on path '{}'!".format(site_name, cred_path)
- log.info(msg)
+ self.log.info(msg)
return
self.service = None
@@ -318,7 +324,7 @@ class GDriveHandler(AbstractProvider):
fields='id')
media.stream()
- log.debug("Start Upload! {}".format(source_path))
+ self.log.debug("Start Upload! {}".format(source_path))
last_tick = status = response = None
status_val = 0
while response is None:
@@ -331,7 +337,7 @@ class GDriveHandler(AbstractProvider):
if not last_tick or \
time.time() - last_tick >= server.LOG_PROGRESS_SEC:
last_tick = time.time()
- log.debug("Uploaded %d%%." %
+ self.log.debug("Uploaded %d%%." %
int(status_val * 100))
server.update_db(project_name=project_name,
new_file_id=None,
@@ -350,8 +356,9 @@ class GDriveHandler(AbstractProvider):
if 'has not granted' in ex._get_reason().strip():
raise PermissionError(ex._get_reason().strip())
- log.warning("Forbidden received, hit quota. "
- "Injecting 60s delay.")
+ self.log.warning(
+ "Forbidden received, hit quota. Injecting 60s delay."
+ )
time.sleep(60)
return False
raise
@@ -417,7 +424,7 @@ class GDriveHandler(AbstractProvider):
if not last_tick or \
time.time() - last_tick >= server.LOG_PROGRESS_SEC:
last_tick = time.time()
- log.debug("Downloaded %d%%." %
+ self.log.debug("Downloaded %d%%." %
int(status_val * 100))
server.update_db(project_name=project_name,
new_file_id=None,
@@ -629,9 +636,9 @@ class GDriveHandler(AbstractProvider):
["gdrive"]
)
except KeyError:
- log.info(("Sync Server: There are no presets for Gdrive " +
- "provider.").
- format(str(provider_presets)))
+ log.info((
+ "Sync Server: There are no presets for Gdrive provider."
+ ).format(str(provider_presets)))
return
return provider_presets
@@ -704,7 +711,7 @@ class GDriveHandler(AbstractProvider):
roots[self.MY_DRIVE_STR] = self.service.files() \
.get(fileId='root').execute()
except errors.HttpError:
- log.warning("HttpError in sync loop, "
+ self.log.warning("HttpError in sync loop, "
"trying next loop",
exc_info=True)
raise ResumableError
@@ -727,7 +734,7 @@ class GDriveHandler(AbstractProvider):
Returns:
(dictionary) path as a key, folder id as a value
"""
- log.debug("build_tree len {}".format(len(folders)))
+ self.log.debug("build_tree len {}".format(len(folders)))
if not self.root: # build only when necessary, could be expensive
self.root = self._prepare_root_info()
@@ -779,9 +786,9 @@ class GDriveHandler(AbstractProvider):
loop_cnt += 1
if len(no_parents_yet) > 0:
- log.debug("Some folders path are not resolved {}".
+ self.log.debug("Some folders path are not resolved {}".
format(no_parents_yet))
- log.debug("Remove deleted folders from trash.")
+ self.log.debug("Remove deleted folders from trash.")
return tree
diff --git a/openpype/modules/sync_server/providers/sftp.py b/openpype/modules/sync_server/providers/sftp.py
index 302ffae3e6..40f11cb9dd 100644
--- a/openpype/modules/sync_server/providers/sftp.py
+++ b/openpype/modules/sync_server/providers/sftp.py
@@ -4,10 +4,10 @@ import time
import threading
import platform
-from openpype.api import Logger
-from openpype.api import get_system_settings
+from openpype.lib import Logger
+from openpype.settings import get_system_settings
from .abstract_provider import AbstractProvider
-log = Logger().get_logger("SyncServer")
+log = Logger.get_logger("SyncServer-SFTPHandler")
pysftp = None
try:
@@ -43,8 +43,9 @@ class SFTPHandler(AbstractProvider):
self.presets = presets
if not self.presets:
- log.warning("Sync Server: There are no presets for {}.".
- format(site_name))
+ self.log.warning(
+ "Sync Server: There are no presets for {}.".format(site_name)
+ )
return
# store to instance for reconnect
@@ -423,7 +424,7 @@ class SFTPHandler(AbstractProvider):
return pysftp.Connection(**conn_params)
except (paramiko.ssh_exception.SSHException,
pysftp.exceptions.ConnectionException):
- log.warning("Couldn't connect", exc_info=True)
+ self.log.warning("Couldn't connect", exc_info=True)
def _mark_progress(self, project_name, file, representation, server, site,
source_path, target_path, direction):
@@ -445,7 +446,7 @@ class SFTPHandler(AbstractProvider):
time.time() - last_tick >= server.LOG_PROGRESS_SEC:
status_val = target_file_size / source_file_size
last_tick = time.time()
- log.debug(direction + "ed %d%%." % int(status_val * 100))
+ self.log.debug(direction + "ed %d%%." % int(status_val * 100))
server.update_db(project_name=project_name,
new_file_id=None,
file=file,
diff --git a/openpype/modules/sync_server/sync_server.py b/openpype/modules/sync_server/sync_server.py
index 97538fcd4e..8b11055e65 100644
--- a/openpype/modules/sync_server/sync_server.py
+++ b/openpype/modules/sync_server/sync_server.py
@@ -6,14 +6,11 @@ import concurrent.futures
from concurrent.futures._base import CancelledError
from .providers import lib
-from openpype.lib import PypeLogger
+from openpype.lib import Logger
from .utils import SyncStatus, ResumableError
-log = PypeLogger().get_logger("SyncServer")
-
-
async def upload(module, project_name, file, representation, provider_name,
remote_site_name, tree=None, preset=None):
"""
@@ -238,6 +235,7 @@ class SyncServerThread(threading.Thread):
Stopped when tray is closed.
"""
def __init__(self, module):
+ self.log = Logger.get_logger(self.__class__.__name__)
super(SyncServerThread, self).__init__()
self.module = module
self.loop = None
@@ -249,17 +247,17 @@ class SyncServerThread(threading.Thread):
self.is_running = True
try:
- log.info("Starting Sync Server")
+ self.log.info("Starting Sync Server")
self.loop = asyncio.new_event_loop() # create new loop for thread
asyncio.set_event_loop(self.loop)
self.loop.set_default_executor(self.executor)
asyncio.ensure_future(self.check_shutdown(), loop=self.loop)
asyncio.ensure_future(self.sync_loop(), loop=self.loop)
- log.info("Sync Server Started")
+ self.log.info("Sync Server Started")
self.loop.run_forever()
except Exception:
- log.warning(
+ self.log.warning(
"Sync Server service has failed", exc_info=True
)
finally:
@@ -379,8 +377,9 @@ class SyncServerThread(threading.Thread):
))
processed_file_path.add(file_path)
- log.debug("Sync tasks count {}".
- format(len(task_files_to_process)))
+ self.log.debug("Sync tasks count {}".format(
+ len(task_files_to_process)
+ ))
files_created = await asyncio.gather(
*task_files_to_process,
return_exceptions=True)
@@ -399,28 +398,31 @@ class SyncServerThread(threading.Thread):
error)
duration = time.time() - start_time
- log.debug("One loop took {:.2f}s".format(duration))
+ self.log.debug("One loop took {:.2f}s".format(duration))
delay = self.module.get_loop_delay(project_name)
- log.debug("Waiting for {} seconds to new loop".format(delay))
+ self.log.debug(
+ "Waiting for {} seconds to new loop".format(delay)
+ )
self.timer = asyncio.create_task(self.run_timer(delay))
await asyncio.gather(self.timer)
except ConnectionResetError:
- log.warning("ConnectionResetError in sync loop, "
- "trying next loop",
- exc_info=True)
+ self.log.warning(
+ "ConnectionResetError in sync loop, trying next loop",
+ exc_info=True)
except CancelledError:
# just stopping server
pass
except ResumableError:
- log.warning("ResumableError in sync loop, "
- "trying next loop",
- exc_info=True)
+ self.log.warning(
+ "ResumableError in sync loop, trying next loop",
+ exc_info=True)
except Exception:
self.stop()
- log.warning("Unhandled except. in sync loop, stopping server",
- exc_info=True)
+ self.log.warning(
+ "Unhandled except. in sync loop, stopping server",
+ exc_info=True)
def stop(self):
"""Sets is_running flag to false, 'check_shutdown' shuts server down"""
@@ -433,16 +435,17 @@ class SyncServerThread(threading.Thread):
while self.is_running:
if self.module.long_running_tasks:
task = self.module.long_running_tasks.pop()
- log.info("starting long running")
+ self.log.info("starting long running")
await self.loop.run_in_executor(None, task["func"])
- log.info("finished long running")
+ self.log.info("finished long running")
self.module.projects_processed.remove(task["project_name"])
await asyncio.sleep(0.5)
tasks = [task for task in asyncio.all_tasks() if
task is not asyncio.current_task()]
list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks
results = await asyncio.gather(*tasks, return_exceptions=True)
- log.debug(f'Finished awaiting cancelled tasks, results: {results}...')
+ self.log.debug(
+ f'Finished awaiting cancelled tasks, results: {results}...')
await self.loop.shutdown_asyncgens()
# to really make sure everything else has time to stop
self.executor.shutdown(wait=True)
@@ -455,29 +458,32 @@ class SyncServerThread(threading.Thread):
def reset_timer(self):
"""Called when waiting for next loop should be skipped"""
- log.debug("Resetting timer")
+ self.log.debug("Resetting timer")
if self.timer:
self.timer.cancel()
self.timer = None
def _working_sites(self, project_name):
if self.module.is_project_paused(project_name):
- log.debug("Both sites same, skipping")
+ self.log.debug("Both sites same, skipping")
return None, None
local_site = self.module.get_active_site(project_name)
remote_site = self.module.get_remote_site(project_name)
if local_site == remote_site:
- log.debug("{}-{} sites same, skipping".format(local_site,
- remote_site))
+ self.log.debug("{}-{} sites same, skipping".format(
+ local_site, remote_site))
return None, None
configured_sites = _get_configured_sites(self.module, project_name)
if not all([local_site in configured_sites,
remote_site in configured_sites]):
- log.debug("Some of the sites {} - {} is not ".format(local_site,
- remote_site) +
- "working properly")
+ self.log.debug(
+ "Some of the sites {} - {} is not working properly".format(
+ local_site, remote_site
+ )
+ )
+
return None, None
return local_site, remote_site
diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py
index c7f9484e55..634b68c55f 100644
--- a/openpype/modules/sync_server/sync_server_module.py
+++ b/openpype/modules/sync_server/sync_server_module.py
@@ -13,7 +13,7 @@ from openpype.settings import (
get_project_settings,
get_system_settings,
)
-from openpype.lib import PypeLogger, get_local_site_id
+from openpype.lib import Logger, get_local_site_id
from openpype.pipeline import AvalonMongoDB, Anatomy
from openpype.settings.lib import (
get_default_anatomy_settings,
@@ -28,7 +28,7 @@ from .utils import time_function, SyncStatus, SiteAlreadyPresentError
from openpype.client import get_representations, get_representation_by_id
-log = PypeLogger.get_logger("SyncServer")
+log = Logger.get_logger("SyncServer")
class SyncServerModule(OpenPypeModule, ITrayModule):
@@ -462,7 +462,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
representation_id (string): MongoDB objectId value
site_name (string): 'gdrive', 'studio' etc.
"""
- log.info("Pausing SyncServer for {}".format(representation_id))
+ self.log.info("Pausing SyncServer for {}".format(representation_id))
self._paused_representations.add(representation_id)
self.reset_site_on_representation(project_name, representation_id,
site_name=site_name, pause=True)
@@ -479,7 +479,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
representation_id (string): MongoDB objectId value
site_name (string): 'gdrive', 'studio' etc.
"""
- log.info("Unpausing SyncServer for {}".format(representation_id))
+ self.log.info("Unpausing SyncServer for {}".format(representation_id))
try:
self._paused_representations.remove(representation_id)
except KeyError:
@@ -518,7 +518,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Args:
project_name (string): project_name name
"""
- log.info("Pausing SyncServer for {}".format(project_name))
+ self.log.info("Pausing SyncServer for {}".format(project_name))
self._paused_projects.add(project_name)
def unpause_project(self, project_name):
@@ -530,7 +530,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Args:
project_name (string):
"""
- log.info("Unpausing SyncServer for {}".format(project_name))
+ self.log.info("Unpausing SyncServer for {}".format(project_name))
try:
self._paused_projects.remove(project_name)
except KeyError:
@@ -558,14 +558,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
It won't check anything, not uploading/downloading...
"""
- log.info("Pausing SyncServer")
+ self.log.info("Pausing SyncServer")
self._paused = True
def unpause_server(self):
"""
Unpause server
"""
- log.info("Unpausing SyncServer")
+ self.log.info("Unpausing SyncServer")
self._paused = False
def is_paused(self):
@@ -876,7 +876,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
# val = val[platform.system().lower()]
# except KeyError:
# st = "{}'s field value {} should be".format(key, val) # noqa: E501
- # log.error(st + " multiplatform dict")
+ # self.log.error(st + " multiplatform dict")
#
# item["namespace"] = item["namespace"].replace('{site}',
# site_name)
@@ -1148,7 +1148,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
if self.enabled:
self.sync_server_thread.start()
else:
- log.info("No presets or active providers. " +
+ self.log.info("No presets or active providers. " +
"Synchronization not possible.")
def tray_exit(self):
@@ -1166,12 +1166,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
if not self.is_running:
return
try:
- log.info("Stopping sync server server")
+ self.log.info("Stopping sync server server")
self.sync_server_thread.is_running = False
self.sync_server_thread.stop()
- log.info("Sync server stopped")
+ self.log.info("Sync server stopped")
except Exception:
- log.warning(
+ self.log.warning(
"Error has happened during Killing sync server",
exc_info=True
)
@@ -1256,7 +1256,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
sync_project_settings[project_name] = proj_settings
if not sync_project_settings:
- log.info("No enabled and configured projects for sync.")
+ self.log.info("No enabled and configured projects for sync.")
return sync_project_settings
def get_sync_project_setting(self, project_name, exclude_locals=False,
@@ -1387,7 +1387,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
Returns:
(list) of dictionaries
"""
- log.debug("Check representations for : {}".format(project_name))
+ self.log.debug("Check representations for : {}".format(project_name))
self.connection.Session["AVALON_PROJECT"] = project_name
# retry_cnt - number of attempts to sync specific file before giving up
retries_arr = self._get_retries_arr(project_name)
@@ -1466,9 +1466,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
}},
{"$sort": {'priority': -1, '_id': 1}},
]
- log.debug("active_site:{} - remote_site:{}".format(active_site,
- remote_site))
- log.debug("query: {}".format(aggr))
+ self.log.debug("active_site:{} - remote_site:{}".format(
+ active_site, remote_site
+ ))
+ self.log.debug("query: {}".format(aggr))
representations = self.connection.aggregate(aggr)
return representations
@@ -1503,7 +1504,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
if get_local_site_id() not in (local_site, remote_site):
# don't do upload/download for studio sites
- log.debug("No local site {} - {}".format(local_site, remote_site))
+ self.log.debug(
+ "No local site {} - {}".format(local_site, remote_site)
+ )
return SyncStatus.DO_NOTHING
_, remote_rec = self._get_site_rec(sites, remote_site) or {}
@@ -1594,11 +1597,16 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
error_str = ''
source_file = file.get("path", "")
- log.debug("File for {} - {source_file} process {status} {error_str}".
- format(representation_id,
- status=status,
- source_file=source_file,
- error_str=error_str))
+ self.log.debug(
+ (
+ "File for {} - {source_file} process {status} {error_str}"
+ ).format(
+ representation_id,
+ status=status,
+ source_file=source_file,
+ error_str=error_str
+ )
+ )
def _get_file_info(self, files, _id):
"""
@@ -1772,7 +1780,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
break
if not found:
msg = "Site {} not found".format(site_name)
- log.info(msg)
+ self.log.info(msg)
raise ValueError(msg)
update = {
@@ -1799,7 +1807,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
break
if not found:
msg = "Site {} not found".format(site_name)
- log.info(msg)
+ self.log.info(msg)
raise ValueError(msg)
if pause:
@@ -1834,7 +1842,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
reset_existing = False
files = representation.get("files", [])
if not files:
- log.debug("No files for {}".format(representation_id))
+ self.log.debug("No files for {}".format(representation_id))
return
for repre_file in files:
@@ -1851,7 +1859,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
reset_existing = True
else:
msg = "Site {} already present".format(site_name)
- log.info(msg)
+ self.log.info(msg)
raise SiteAlreadyPresentError(msg)
if reset_existing:
@@ -1951,16 +1959,19 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
self.widget = SyncServerWindow(self)
no_errors = True
except ValueError:
- log.info("No system setting for sync. Not syncing.", exc_info=True)
+ self.log.info(
+ "No system setting for sync. Not syncing.", exc_info=True
+ )
except KeyError:
- log.info((
+ self.log.info((
"There are not set presets for SyncServer OR "
"Credentials provided are invalid, "
"no syncing possible").
format(str(self.sync_project_settings)), exc_info=True)
except:
- log.error("Uncaught exception durin start of SyncServer",
- exc_info=True)
+ self.log.error(
+ "Uncaught exception durin start of SyncServer",
+ exc_info=True)
self.enabled = no_errors
self.widget.show()
diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py
index 96fad6a247..9b9768327e 100644
--- a/openpype/modules/sync_server/tray/app.py
+++ b/openpype/modules/sync_server/tray/app.py
@@ -2,7 +2,6 @@ from Qt import QtWidgets, QtCore, QtGui
from openpype.tools.settings import style
-from openpype.lib import PypeLogger
from openpype import resources
from .widgets import (
@@ -10,8 +9,6 @@ from .widgets import (
SyncRepresentationSummaryWidget
)
-log = PypeLogger().get_logger("SyncServer")
-
class SyncServerWindow(QtWidgets.QDialog):
"""
diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py
index 5ab809a816..988eb40d28 100644
--- a/openpype/modules/sync_server/tray/delegates.py
+++ b/openpype/modules/sync_server/tray/delegates.py
@@ -1,8 +1,7 @@
import os
from Qt import QtCore, QtWidgets, QtGui
-from openpype.lib import PypeLogger
-from . import lib
+from openpype.lib import Logger
from openpype.tools.utils.constants import (
LOCAL_PROVIDER_ROLE,
@@ -16,7 +15,7 @@ from openpype.tools.utils.constants import (
EDIT_ICON_ROLE
)
-log = PypeLogger().get_logger("SyncServer")
+log = Logger.get_logger("SyncServer")
class PriorityDelegate(QtWidgets.QStyledItemDelegate):
diff --git a/openpype/modules/sync_server/tray/lib.py b/openpype/modules/sync_server/tray/lib.py
index 87344be634..ff93815639 100644
--- a/openpype/modules/sync_server/tray/lib.py
+++ b/openpype/modules/sync_server/tray/lib.py
@@ -2,11 +2,6 @@ import attr
import abc
import six
-from openpype.lib import PypeLogger
-
-
-log = PypeLogger().get_logger("SyncServer")
-
STATUS = {
0: 'In Progress',
1: 'Queued',
diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py
index 629c4cbbf1..d63d046508 100644
--- a/openpype/modules/sync_server/tray/models.py
+++ b/openpype/modules/sync_server/tray/models.py
@@ -9,8 +9,7 @@ import qtawesome
from openpype.tools.utils.delegates import pretty_timestamp
-from openpype.lib import PypeLogger
-from openpype.api import get_local_site_id
+from openpype.lib import Logger, get_local_site_id
from openpype.client import get_representation_by_id
from . import lib
@@ -33,7 +32,7 @@ from openpype.tools.utils.constants import (
)
-log = PypeLogger().get_logger("SyncServer")
+log = Logger.get_logger("SyncServer")
class _SyncRepresentationModel(QtCore.QAbstractTableModel):
diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py
index b4ee447ac4..c40aa98f24 100644
--- a/openpype/modules/sync_server/tray/widgets.py
+++ b/openpype/modules/sync_server/tray/widgets.py
@@ -9,8 +9,7 @@ import qtawesome
from openpype.tools.settings import style
-from openpype.api import get_local_site_id
-from openpype.lib import PypeLogger
+from openpype.lib import Logger, get_local_site_id
from openpype.tools.utils.delegates import pretty_timestamp
@@ -36,7 +35,7 @@ from openpype.tools.utils.constants import (
TRIES_ROLE
)
-log = PypeLogger().get_logger("SyncServer")
+log = Logger.get_logger("SyncServer")
class SyncProjectListWidget(QtWidgets.QWidget):
diff --git a/openpype/modules/sync_server/utils.py b/openpype/modules/sync_server/utils.py
index 03f362202f..4caa01e9d7 100644
--- a/openpype/modules/sync_server/utils.py
+++ b/openpype/modules/sync_server/utils.py
@@ -1,6 +1,8 @@
import time
-from openpype.api import Logger
-log = Logger().get_logger("SyncServer")
+
+from openpype.lib import Logger
+
+log = Logger.get_logger("SyncServer")
class ResumableError(Exception):
diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py
index 9ec27e659b..7242761143 100644
--- a/openpype/modules/timers_manager/idle_threads.py
+++ b/openpype/modules/timers_manager/idle_threads.py
@@ -2,7 +2,7 @@ import time
from Qt import QtCore
from pynput import mouse, keyboard
-from openpype.lib import PypeLogger
+from openpype.lib import Logger
class IdleItem:
@@ -31,7 +31,7 @@ class IdleManager(QtCore.QThread):
def __init__(self):
super(IdleManager, self).__init__()
- self.log = PypeLogger.get_logger(self.__class__.__name__)
+ self.log = Logger.get_logger(self.__class__.__name__)
self.signal_reset_timer.connect(self._reset_time)
self.idle_item = IdleItem()
diff --git a/openpype/modules/timers_manager/rest_api.py b/openpype/modules/timers_manager/rest_api.py
index f16cb316c3..4a2e9e6575 100644
--- a/openpype/modules/timers_manager/rest_api.py
+++ b/openpype/modules/timers_manager/rest_api.py
@@ -1,9 +1,7 @@
import json
from aiohttp.web_response import Response
-from openpype.api import Logger
-
-log = Logger().get_logger("Event processor")
+from openpype.lib import Logger
class TimersManagerModuleRestApi:
@@ -12,6 +10,7 @@ class TimersManagerModuleRestApi:
happens in Workfile app.
"""
def __init__(self, user_module, server_manager):
+ self._log = None
self.module = user_module
self.server_manager = server_manager
@@ -19,6 +18,12 @@ class TimersManagerModuleRestApi:
self.register()
+ @property
+ def log(self):
+ if self._log is None:
+            self._log = Logger.get_logger(self.__class__.__name__)
+ return self._log
+
def register(self):
self.server_manager.add_route(
"POST",
@@ -47,7 +52,7 @@ class TimersManagerModuleRestApi:
"Payload must contain fields 'project_name,"
" 'asset_name' and 'task_name'"
)
- log.error(msg)
+ self.log.error(msg)
return Response(status=400, message=msg)
self.module.stop_timers()
@@ -73,7 +78,7 @@ class TimersManagerModuleRestApi:
"Payload must contain fields 'project_name, 'asset_name',"
" 'task_name'"
)
- log.warning(message)
+ self.log.warning(message)
return Response(text=message, status=404)
time = self.module.get_task_time(project_name, asset_name, task_name)
diff --git a/openpype/modules/timers_manager/timers_manager.py b/openpype/modules/timers_manager/timers_manager.py
index 93332ace4f..c168e9534d 100644
--- a/openpype/modules/timers_manager/timers_manager.py
+++ b/openpype/modules/timers_manager/timers_manager.py
@@ -6,7 +6,6 @@ from openpype.client import get_asset_by_name
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
ITrayService,
- ILaunchHookPaths,
IPluginPaths
)
from openpype.lib.events import register_event_callback
@@ -79,7 +78,6 @@ class ExampleTimersManagerConnector:
class TimersManager(
OpenPypeModule,
ITrayService,
- ILaunchHookPaths,
IPluginPaths
):
""" Handles about Timers.
@@ -185,12 +183,11 @@ class TimersManager(
)
def get_launch_hook_paths(self):
- """Implementation of `ILaunchHookPaths`."""
+ """Implementation for applications launch hooks."""
- return os.path.join(
- TIMER_MODULE_DIR,
- "launch_hooks"
- )
+ return [
+ os.path.join(TIMER_MODULE_DIR, "launch_hooks")
+ ]
def get_plugin_paths(self):
"""Implementation of `IPluginPaths`."""
diff --git a/openpype/modules/webserver/server.py b/openpype/modules/webserver/server.py
index 82b681f406..120925a362 100644
--- a/openpype/modules/webserver/server.py
+++ b/openpype/modules/webserver/server.py
@@ -4,16 +4,16 @@ import asyncio
from aiohttp import web
-from openpype.lib import PypeLogger
+from openpype.lib import Logger
from .cors_middleware import cors_middleware
-log = PypeLogger.get_logger("WebServer")
-
class WebServerManager:
"""Manger that care about web server thread."""
def __init__(self, port=None, host=None):
+ self._log = None
+
self.port = port or 8079
self.host = host or "localhost"
@@ -33,6 +33,12 @@ class WebServerManager:
self.webserver_thread = WebServerThread(self)
+ @property
+ def log(self):
+ if self._log is None:
+ self._log = Logger.get_logger(self.__class__.__name__)
+ return self._log
+
@property
def url(self):
return "http://{}:{}".format(self.host, self.port)
@@ -51,12 +57,12 @@ class WebServerManager:
if not self.is_running:
return
try:
- log.debug("Stopping Web server")
+ self.log.debug("Stopping Web server")
self.webserver_thread.is_running = False
self.webserver_thread.stop()
except Exception:
- log.warning(
+ self.log.warning(
"Error has happened during Killing Web server",
exc_info=True
)
@@ -74,7 +80,10 @@ class WebServerManager:
class WebServerThread(threading.Thread):
""" Listener for requests in thread."""
+
def __init__(self, manager):
+ self._log = None
+
super(WebServerThread, self).__init__()
self.is_running = False
@@ -84,6 +93,12 @@ class WebServerThread(threading.Thread):
self.site = None
self.tasks = []
+ @property
+ def log(self):
+ if self._log is None:
+ self._log = Logger.get_logger(self.__class__.__name__)
+ return self._log
+
@property
def port(self):
return self.manager.port
@@ -96,13 +111,13 @@ class WebServerThread(threading.Thread):
self.is_running = True
try:
- log.info("Starting WebServer server")
+ self.log.info("Starting WebServer server")
self.loop = asyncio.new_event_loop() # create new loop for thread
asyncio.set_event_loop(self.loop)
self.loop.run_until_complete(self.start_server())
- log.debug(
+ self.log.debug(
"Running Web server on URL: \"localhost:{}\"".format(self.port)
)
@@ -110,7 +125,7 @@ class WebServerThread(threading.Thread):
self.loop.run_forever()
except Exception:
- log.warning(
+ self.log.warning(
"Web Server service has failed", exc_info=True
)
finally:
@@ -118,7 +133,7 @@ class WebServerThread(threading.Thread):
self.is_running = False
self.manager.thread_stopped()
- log.info("Web server stopped")
+ self.log.info("Web server stopped")
async def start_server(self):
""" Starts runner and TCPsite """
@@ -138,17 +153,17 @@ class WebServerThread(threading.Thread):
while self.is_running:
while self.tasks:
task = self.tasks.pop(0)
- log.debug("waiting for task {}".format(task))
+ self.log.debug("waiting for task {}".format(task))
await task
- log.debug("returned value {}".format(task.result))
+ self.log.debug("returned value {}".format(task.result))
await asyncio.sleep(0.5)
- log.debug("Starting shutdown")
+ self.log.debug("Starting shutdown")
await self.site.stop()
- log.debug("Site stopped")
+ self.log.debug("Site stopped")
await self.runner.cleanup()
- log.debug("Runner stopped")
+ self.log.debug("Runner stopped")
tasks = [
task
for task in asyncio.all_tasks()
@@ -156,7 +171,9 @@ class WebServerThread(threading.Thread):
]
list(map(lambda task: task.cancel(), tasks)) # cancel all the tasks
results = await asyncio.gather(*tasks, return_exceptions=True)
- log.debug(f'Finished awaiting cancelled tasks, results: {results}...')
+ self.log.debug(
+ f'Finished awaiting cancelled tasks, results: {results}...'
+ )
await self.loop.shutdown_asyncgens()
# to really make sure everything else has time to stop
await asyncio.sleep(0.07)
diff --git a/openpype/modules/webserver/webserver_module.py b/openpype/modules/webserver/webserver_module.py
index 686bd27bfd..16861abd29 100644
--- a/openpype/modules/webserver/webserver_module.py
+++ b/openpype/modules/webserver/webserver_module.py
@@ -53,9 +53,12 @@ class WebServerModule(OpenPypeModule, ITrayService):
try:
module.webserver_initialization(self.server_manager)
except Exception:
- self.log.warning((
- "Failed to connect module \"{}\" to webserver."
- ).format(module.name))
+ self.log.warning(
+ (
+ "Failed to connect module \"{}\" to webserver."
+ ).format(module.name),
+ exc_info=True
+ )
def tray_init(self):
self.create_server_manager()
diff --git a/openpype/pipeline/anatomy.py b/openpype/pipeline/anatomy.py
index 08db4749b3..cb6e07154b 100644
--- a/openpype/pipeline/anatomy.py
+++ b/openpype/pipeline/anatomy.py
@@ -14,9 +14,9 @@ from openpype.lib.path_templates import (
TemplatesDict,
FormatObject,
)
-from openpype.lib.log import PypeLogger
+from openpype.lib.log import Logger
-log = PypeLogger.get_logger(__name__)
+log = Logger.get_logger(__name__)
class ProjectNotSet(Exception):
diff --git a/openpype/pipeline/publish/lib.py b/openpype/pipeline/publish/lib.py
index d5494cd8a4..9060a0bf4b 100644
--- a/openpype/pipeline/publish/lib.py
+++ b/openpype/pipeline/publish/lib.py
@@ -273,3 +273,43 @@ def filter_pyblish_plugins(plugins):
option, value, plugin.__name__))
setattr(plugin, option, value)
+
+
+def find_close_plugin(close_plugin_name, log):
+ if close_plugin_name:
+ plugins = pyblish.api.discover()
+ for plugin in plugins:
+ if plugin.__name__ == close_plugin_name:
+ return plugin
+
+ log.debug("Close plugin not found, app might not close.")
+
+
+def remote_publish(log, close_plugin_name=None, raise_error=False):
+ """Loops through all plugins, logs to console. Used for tests.
+
+ Args:
+ log (openpype.lib.Logger)
+ close_plugin_name (str): name of plugin with responsibility to
+ close host app
+ """
+ # Error exit as soon as any error occurs.
+ error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
+
+ close_plugin = find_close_plugin(close_plugin_name, log)
+
+ for result in pyblish.util.publish_iter():
+ for record in result["records"]:
+ log.info("{}: {}".format(
+ result["plugin"].label, record.msg))
+
+ if result["error"]:
+ error_message = error_format.format(**result)
+ log.error(error_message)
+ if close_plugin: # close host app explicitly after error
+ context = pyblish.api.Context()
+ close_plugin().process(context)
+ if raise_error:
+ # Fatal Error is because of Deadline
+ error_message = "Fatal Error: " + error_format.format(**result)
+ raise RuntimeError(error_message)
diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py
index 66bf5e9bb4..fe46a4bc54 100644
--- a/openpype/pype_commands.py
+++ b/openpype/pype_commands.py
@@ -5,20 +5,6 @@ import sys
import json
import time
-from openpype.lib import PypeLogger
-from openpype.api import get_app_environments_for_context
-from openpype.lib.plugin_tools import get_batch_asset_task_info
-from openpype.lib.remote_publish import (
- get_webpublish_conn,
- start_webpublish_log,
- publish_and_log,
- fail_batch,
- find_variant_key,
- get_task_data,
- get_timeout,
- IN_PROGRESS_STATUS
-)
-
class PypeCommands:
"""Class implementing commands used by Pype.
@@ -27,10 +13,11 @@ class PypeCommands:
"""
@staticmethod
def launch_tray():
- PypeLogger.set_process_name("Tray")
-
+ from openpype.lib import Logger
from openpype.tools import tray
+ Logger.set_process_name("Tray")
+
tray.main()
@staticmethod
@@ -47,10 +34,12 @@ class PypeCommands:
@staticmethod
def add_modules(click_func):
"""Modules/Addons can add their cli commands dynamically."""
+
+ from openpype.lib import Logger
from openpype.modules import ModulesManager
manager = ModulesManager()
- log = PypeLogger.get_logger("AddModulesCLI")
+ log = Logger.get_logger("CLI-AddModules")
for module in manager.modules:
try:
module.cli(click_func)
@@ -72,8 +61,8 @@ class PypeCommands:
@staticmethod
def launch_webpublisher_webservercli(*args, **kwargs):
- from openpype.hosts.webpublisher.webserver_service.webserver_cli \
- import (run_webserver)
+ from openpype.hosts.webpublisher.webserver_service import run_webserver
+
return run_webserver(*args, **kwargs)
@staticmethod
@@ -96,10 +85,11 @@ class PypeCommands:
Raises:
RuntimeError: When there is no path to process.
"""
+
+ from openpype.lib import Logger
+ from openpype.lib.applications import get_app_environments_for_context
from openpype.modules import ModulesManager
from openpype.pipeline import install_openpype_plugins
-
- from openpype.api import Logger
from openpype.tools.utils.host_tools import show_publish
from openpype.tools.utils.lib import qt_app_context
@@ -107,7 +97,7 @@ class PypeCommands:
import pyblish.api
import pyblish.util
- log = Logger.get_logger()
+ log = Logger.get_logger("CLI-publish")
install_openpype_plugins()
@@ -195,92 +185,14 @@ class PypeCommands:
targets (list): Pyblish targets
(to choose validator for example)
"""
- import pyblish.api
- from openpype.api import Logger
- from openpype.lib import ApplicationManager
- log = Logger.get_logger()
-
- log.info("remotepublishphotoshop command")
-
- task_data = get_task_data(batch_path)
-
- workfile_path = os.path.join(batch_path,
- task_data["task"],
- task_data["files"][0])
-
- print("workfile_path {}".format(workfile_path))
-
- batch_id = task_data["batch"]
- dbcon = get_webpublish_conn()
- # safer to start logging here, launch might be broken altogether
- _id = start_webpublish_log(dbcon, batch_id, user_email)
-
- batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS}))
- if len(batches_in_progress) > 1:
- running_batches = [str(batch["_id"])
- for batch in batches_in_progress
- if batch["_id"] != _id]
- msg = "There are still running batches {}\n". \
- format("\n".join(running_batches))
- msg += "Ask admin to check them and reprocess current batch"
- fail_batch(_id, dbcon, msg)
- print("Another batch running, probably stuck, ask admin for help")
-
- asset_name, task_name, task_type = get_batch_asset_task_info(
- task_data["context"])
-
- application_manager = ApplicationManager()
- found_variant_key = find_variant_key(application_manager, host_name)
- app_name = "{}/{}".format(host_name, found_variant_key)
-
- # must have for proper launch of app
- env = get_app_environments_for_context(
- project_name,
- asset_name,
- task_name,
- app_name
+ from openpype.hosts.webpublisher.cli_functions import (
+ cli_publish_from_app
)
- print("env:: {}".format(env))
- os.environ.update(env)
- os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
- # must pass identifier to update log lines for a batch
- os.environ["BATCH_LOG_ID"] = str(_id)
- os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
- os.environ["USER_EMAIL"] = user_email
-
- pyblish.api.register_host(host_name)
- if targets:
- if isinstance(targets, str):
- targets = [targets]
- current_targets = os.environ.get("PYBLISH_TARGETS", "").split(
- os.pathsep)
- for target in targets:
- current_targets.append(target)
-
- os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
- set(current_targets))
-
- data = {
- "last_workfile_path": workfile_path,
- "start_last_workfile": True,
- "project_name": project_name,
- "asset_name": asset_name,
- "task_name": task_name
- }
-
- launched_app = application_manager.launch(app_name, **data)
-
- timeout = get_timeout(project_name, host_name, task_type)
-
- time_start = time.time()
- while launched_app.poll() is None:
- time.sleep(0.5)
- if time.time() - time_start > timeout:
- launched_app.terminate()
- msg = "Timeout reached"
- fail_batch(_id, dbcon, msg)
+ cli_publish_from_app(
+ project_name, batch_path, host_name, user_email, targets
+ )
@staticmethod
def remotepublish(project, batch_path, user_email, targets=None):
@@ -304,46 +216,12 @@ class PypeCommands:
Raises:
RuntimeError: When there is no path to process.
"""
- if not batch_path:
- raise RuntimeError("No publish paths specified")
- # Register target and host
- import pyblish.api
- import pyblish.util
+ from openpype.hosts.webpublisher.cli_functions import (
+ cli_publish
+ )
- from openpype.pipeline import install_host
- from openpype.hosts.webpublisher import api as webpublisher
-
- log = PypeLogger.get_logger()
-
- log.info("remotepublish command")
-
- host_name = "webpublisher"
- os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
- os.environ["AVALON_PROJECT"] = project
- os.environ["AVALON_APP"] = host_name
- os.environ["USER_EMAIL"] = user_email
- os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
-
- pyblish.api.register_host(host_name)
-
- if targets:
- if isinstance(targets, str):
- targets = [targets]
- for target in targets:
- pyblish.api.register_target(target)
-
- install_host(webpublisher)
-
- log.info("Running publish ...")
-
- _, batch_id = os.path.split(batch_path)
- dbcon = get_webpublish_conn()
- _id = start_webpublish_log(dbcon, batch_id, user_email)
-
- publish_and_log(dbcon, _id, log, batch_id=batch_id)
-
- log.info("Publish finished.")
+ cli_publish(project, batch_path, user_email, targets)
@staticmethod
def extractenvironments(output_json_path, project, asset, task, app,
@@ -352,8 +230,10 @@ class PypeCommands:
Called by Deadline plugin to propagate environment into render jobs.
"""
+
+ from openpype.lib.applications import get_app_environments_for_context
+
if all((project, asset, task, app)):
- from openpype.api import get_app_environments_for_context
env = get_app_environments_for_context(
project, asset, task, app, env_group
)
@@ -455,7 +335,6 @@ class PypeCommands:
sync_server_module.server_init()
sync_server_module.server_start()
- import time
while True:
time.sleep(1.0)
diff --git a/openpype/scripts/remote_publish.py b/openpype/scripts/remote_publish.py
index d322f369d1..37df35e36c 100644
--- a/openpype/scripts/remote_publish.py
+++ b/openpype/scripts/remote_publish.py
@@ -1,11 +1,12 @@
try:
- from openpype.api import Logger
- import openpype.lib.remote_publish
+ from openpype.lib import Logger
+ from openpype.pipeline.publish.lib import remote_publish
except ImportError as exc:
# Ensure Deadline fails by output an error that contains "Fatal Error:"
raise ImportError("Fatal Error: %s" % exc)
+
if __name__ == "__main__":
# Perform remote publish with thorough error checking
log = Logger.get_logger(__name__)
- openpype.lib.remote_publish.publish(log, raise_error=True)
+ remote_publish(log, raise_error=True)
diff --git a/openpype/settings/defaults/project_settings/nuke.json b/openpype/settings/defaults/project_settings/nuke.json
index 3e29122074..f40ec1fe9e 100644
--- a/openpype/settings/defaults/project_settings/nuke.json
+++ b/openpype/settings/defaults/project_settings/nuke.json
@@ -131,7 +131,7 @@
"write"
]
},
- "ValidateInstanceInContext": {
+ "ValidateCorrectAssetName": {
"enabled": true,
"optional": true,
"active": true
diff --git a/openpype/settings/defaults/project_settings/photoshop.json b/openpype/settings/defaults/project_settings/photoshop.json
index 758ac64a35..552c2c9cad 100644
--- a/openpype/settings/defaults/project_settings/photoshop.json
+++ b/openpype/settings/defaults/project_settings/photoshop.json
@@ -8,7 +8,7 @@
},
"publish": {
"CollectColorCodedInstances": {
- "create_flatten_image": false,
+ "create_flatten_image": "no",
"flatten_subset_template": "",
"color_code_mapping": []
},
diff --git a/openpype/settings/entities/base_entity.py b/openpype/settings/entities/base_entity.py
index 741f13c49b..f28fefdf5a 100644
--- a/openpype/settings/entities/base_entity.py
+++ b/openpype/settings/entities/base_entity.py
@@ -15,7 +15,7 @@ from .exceptions import (
EntitySchemaError
)
-from openpype.lib import PypeLogger
+from openpype.lib import Logger
@six.add_metaclass(ABCMeta)
@@ -478,7 +478,7 @@ class BaseItemEntity(BaseEntity):
def log(self):
"""Auto created logger for debugging or warnings."""
if self._log is None:
- self._log = PypeLogger.get_logger(self.__class__.__name__)
+ self._log = Logger.get_logger(self.__class__.__name__)
return self._log
@abstractproperty
diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json
index 49860301b6..7aa49c99a4 100644
--- a/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json
+++ b/openpype/settings/entities/schemas/projects_schema/schema_project_photoshop.json
@@ -45,9 +45,15 @@
"label": "Set color for publishable layers, set its resulting family and template for subset name. \nCan create flatten image from published instances.(Applicable only for remote publishing!)"
},
{
- "type": "boolean",
"key": "create_flatten_image",
- "label": "Create flatten image"
+ "label": "Create flatten image",
+ "type": "enum",
+ "multiselection": false,
+ "enum_items": [
+ { "flatten_with_images": "Flatten with images" },
+ { "flatten_only": "Flatten only" },
+ { "no": "No" }
+ ]
},
{
"type": "text",
diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
index 575bfe79e7..e5827a92c4 100644
--- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
+++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_nuke_publish.json
@@ -61,8 +61,8 @@
"name": "template_publish_plugin",
"template_data": [
{
- "key": "ValidateInstanceInContext",
- "label": "Validate Instance In Context"
+ "key": "ValidateCorrectAssetName",
+            "label": "Validate Correct Asset Name"
}
]
},