Merge pull request #3740 from pypeclub/feature/OP-3793_Webpublisher-as-addon
Webpublisher: Webpublisher is used as addon
This commit is contained in: commit 41689a337e
18 changed files with 508 additions and 322 deletions
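The change this PR makes throughout the diff below is that callers no longer import headless publish helpers from openpype.lib.remote_publish; they resolve the webpublisher addon through ModulesManager and call its method. A minimal sketch of that lookup, assuming a configured OpenPype environment (the logger name and close plugin name are illustrative):

from openpype.lib import Logger
from openpype.modules import ModulesManager

log = Logger.get_logger("headless_publish_example")

# Same pattern the AfterEffects/Photoshop launch scripts below use from
# inside their Qt launcher via execute_in_main_thread.
manager = ModulesManager()
webpublisher_addon = manager["webpublisher"]
webpublisher_addon.headless_publish(log, close_plugin_name="ClosePS", is_test=False)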
@@ -3,11 +3,12 @@ import sys
import contextlib
import traceback
import logging
from functools import partial

from Qt import QtWidgets

from openpype.pipeline import install_host
from openpype.lib.remote_publish import headless_publish
from openpype.modules import ModulesManager

from openpype.tools.utils import host_tools
from .launch_logic import ProcessLauncher, get_stub

@@ -35,10 +36,18 @@ def main(*subprocess_args):
    launcher.start()

    if os.environ.get("HEADLESS_PUBLISH"):
        launcher.execute_in_main_thread(lambda: headless_publish(
            log,
            "CloseAE",
            os.environ.get("IS_TEST")))
        manager = ModulesManager()
        webpublisher_addon = manager["webpublisher"]

        launcher.execute_in_main_thread(
            partial(
                webpublisher_addon.headless_publish,
                log,
                "CloseAE",
                os.environ.get("IS_TEST")
            )
        )

    elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
        save = False
        if os.getenv("WORKFILES_SAVE_AS"):
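The lambda that used to capture the call is replaced with functools.partial, so execute_in_main_thread receives a single ready-made callable. A self-contained illustration of that pattern with a stand-in scheduler (both functions here are hypothetical placeholders, not the launcher API):

from functools import partial

def execute_in_main_thread(callback):
    # Stand-in: the real launcher schedules the callable on the Qt main
    # thread; here we simply invoke it.
    callback()

def headless_publish(log, close_plugin_name, is_test):
    print("publishing:", log, close_plugin_name, is_test)

# partial binds the arguments up front, so only one callable is passed on.
execute_in_main_thread(partial(headless_publish, "log", "CloseAE", False))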
@@ -5,11 +5,10 @@ import traceback

from Qt import QtWidgets

from openpype.api import Logger
from openpype.lib import env_value_to_bool, Logger
from openpype.modules import ModulesManager
from openpype.pipeline import install_host
from openpype.tools.utils import host_tools
from openpype.lib.remote_publish import headless_publish
from openpype.lib import env_value_to_bool

from .launch_logic import ProcessLauncher, stub

@@ -35,8 +34,10 @@ def main(*subprocess_args):
    launcher.start()

    if env_value_to_bool("HEADLESS_PUBLISH"):
        manager = ModulesManager()
        webpublisher_addon = manager["webpublisher"]
        launcher.execute_in_main_thread(
            headless_publish,
            webpublisher_addon.headless_publish,
            log,
            "ClosePS",
            os.environ.get("IS_TEST")
@@ -17,11 +17,11 @@ import os

import pyblish.api

from openpype.lib.plugin_tools import (
    parse_json,
    get_batch_asset_task_info
)
from openpype.pipeline import legacy_io
from openpype_modules.webpublisher.lib import (
    get_batch_asset_task_info,
    parse_json
)


class CollectBatchData(pyblish.api.ContextPlugin):
@@ -0,0 +1,10 @@
from .addon import (
    WebpublisherAddon,
    WEBPUBLISHER_ROOT_DIR,
)


__all__ = (
    "WebpublisherAddon",
    "WEBPUBLISHER_ROOT_DIR",
)
openpype/hosts/webpublisher/addon.py (new file, 106 lines)
@@ -0,0 +1,106 @@
import os

import click

from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostModule

WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class WebpublisherAddon(OpenPypeModule, IHostModule):
    name = "webpublisher"
    host_name = "webpublisher"

    def initialize(self, module_settings):
        self.enabled = True

    def headless_publish(self, log, close_plugin_name=None, is_test=False):
        """Runs publish in an opened host with a context.

        Closes Python process at the end.
        """

        from openpype.pipeline.publish.lib import remote_publish
        from .lib import get_webpublish_conn, publish_and_log

        if is_test:
            remote_publish(log, close_plugin_name)
            return

        dbcon = get_webpublish_conn()
        _id = os.environ.get("BATCH_LOG_ID")
        if not _id:
            log.warning("Unable to store log records, "
                        "batch will be unfinished!")
            return

        publish_and_log(
            dbcon, _id, log, close_plugin_name=close_plugin_name
        )

    def cli(self, click_group):
        click_group.add_command(cli_main)


@click.group(
    WebpublisherAddon.name,
    help="Webpublisher related commands.")
def cli_main():
    pass


@cli_main.command()
@click.argument("path")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
              multiple=True)
def publish(project, path, user=None, targets=None):
    """Start publishing (Inner command).

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """

    from .publish_functions import cli_publish

    cli_publish(project, path, user, targets)


@cli_main.command()
@click.argument("path")
@click.option("-p", "--project", help="Project")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-t", "--targets", help="Targets", default=None,
              multiple=True)
def publishfromapp(project, path, host, user=None, targets=None):
    """Start publishing through application (Inner command).

    Publish collects json from paths provided as an argument.
    More than one path is allowed.
    """

    from .publish_functions import cli_publish_from_app

    cli_publish_from_app(project, path, host, user, targets)


@cli_main.command()
@click.option("-e", "--executable", help="Executable")
@click.option("-u", "--upload_dir", help="Upload dir")
@click.option("-h", "--host", help="Host", default=None)
@click.option("-p", "--port", help="Port", default=None)
def webserver(executable, upload_dir, host=None, port=None):
    """Start service for communication with Webpublish Front end.

    OP must be configured on a machine, e.g. OPENPYPE_MONGO filled AND
    FTRACK_BOT_API_KEY provided with api key from Ftrack.

    Expects a "pype.club" user created on Ftrack.
    """

    from .webserver_service import run_webserver

    run_webserver(executable, upload_dir, host, port)
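The addon's cli hook plugs cli_main into the OpenPype module command group, which is also how the webserver builds its subprocess call further down in this diff (openpype_app, "module", WebpublisherAddon.name, command, batch_dir). A hedged sketch of such an invocation assembled from Python; the executable name, batch folder, project and email are placeholder values:

import subprocess

openpype_app = "openpype_console"  # placeholder executable path
args = [
    openpype_app,
    "module",
    "webpublisher",                     # WebpublisherAddon.name
    "publish",                          # or "publishfromapp" / "webserver"
    "/path/to/uploads/batch_folder",    # placeholder batch dir
    "--project", "demo_project",
    "--user", "artist@studio.example",
]
subprocess.call(args)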
@@ -1,31 +1,23 @@
import os
import logging

from pyblish import api as pyblish
import openpype.hosts.webpublisher
from openpype.pipeline import legacy_io
import pyblish.api

from openpype.host import HostBase
from openpype.hosts.webpublisher import WEBPUBLISHER_ROOT_DIR

log = logging.getLogger("openpype.hosts.webpublisher")

HOST_DIR = os.path.dirname(os.path.abspath(
    openpype.hosts.webpublisher.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")


class WebpublisherHost(HostBase):
    name = "webpublisher"

def install():
    print("Installing Pype config...")
    def install(self):
        print("Installing Pype config...")
        pyblish.api.register_host(self.name)

    pyblish.register_plugin_path(PUBLISH_PATH)
    log.info(PUBLISH_PATH)

    legacy_io.install()


def uninstall():
    pyblish.deregister_plugin_path(PUBLISH_PATH)


# to have required methods for interface
def ls():
    pass
        publish_plugin_dir = os.path.join(
            WEBPUBLISHER_ROOT_DIR, "plugins", "publish"
        )
        pyblish.api.register_plugin_path(publish_plugin_dir)
        self.log.info(publish_plugin_dir)
@@ -1,6 +1,7 @@
import os
from datetime import datetime
import collections
import json

from bson.objectid import ObjectId

@@ -8,9 +9,10 @@ import pyblish.util
import pyblish.api

from openpype.client.mongo import OpenPypeMongoConnection
from openpype.lib.plugin_tools import parse_json
from openpype.settings import get_project_settings
from openpype.lib import Logger
from openpype.lib.profiles_filtering import filter_profiles
from openpype.api import get_project_settings
from openpype.pipeline.publish.lib import find_close_plugin

ERROR_STATUS = "error"
IN_PROGRESS_STATUS = "in_progress"

@@ -19,21 +21,51 @@ SENT_REPROCESSING_STATUS = "sent_for_reprocessing"
FINISHED_REPROCESS_STATUS = "republishing_finished"
FINISHED_OK_STATUS = "finished_ok"

log = Logger.get_logger(__name__)


def headless_publish(log, close_plugin_name=None, is_test=False):
    """Runs publish in an opened host with a context and closes Python process.

def parse_json(path):
    """Parses json file at 'path' location

    Returns:
        (dict) or None if unparsable
    Raises:
        AssertionError if 'path' doesn't exist
    """
    if not is_test:
        dbcon = get_webpublish_conn()
        _id = os.environ.get("BATCH_LOG_ID")
        if not _id:
            log.warning("Unable to store log records, "
                        "batch will be unfinished!")
            return
    path = path.strip('\"')
    assert os.path.isfile(path), (
        "Path to json file doesn't exist. \"{}\"".format(path)
    )
    data = None
    with open(path, "r") as json_file:
        try:
            data = json.load(json_file)
        except Exception as exc:
            log.error(
                "Error loading json: {} - Exception: {}".format(path, exc)
            )
    return data

        publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name)

def get_batch_asset_task_info(ctx):
    """Parses context data from webpublisher's batch metadata

    Returns:
        (tuple): asset, task_name (Optional), task_type
    """
    task_type = "default_task_type"
    task_name = None
    asset = None

    if ctx["type"] == "task":
        items = ctx["path"].split('/')
        asset = items[-2]
        task_name = ctx["name"]
        task_type = ctx["attributes"]["type"]
    else:
        publish(log, close_plugin_name)
        asset = ctx["name"]

    return asset, task_name, task_type


def get_webpublish_conn():

@@ -62,36 +94,6 @@ def start_webpublish_log(dbcon, batch_id, user):
    }).inserted_id


def publish(log, close_plugin_name=None, raise_error=False):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
        log (openpype.lib.Logger)
        close_plugin_name (str): name of plugin with responsibility to
            close host app
    """
    # Error exit as soon as any error occurs.
    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"

    close_plugin = _get_close_plugin(close_plugin_name, log)

    for result in pyblish.util.publish_iter():
        for record in result["records"]:
            log.info("{}: {}".format(
                result["plugin"].label, record.msg))

        if result["error"]:
            error_message = error_format.format(**result)
            log.error(error_message)
            if close_plugin:  # close host app explicitly after error
                context = pyblish.api.Context()
                close_plugin().process(context)
            if raise_error:
                # Fatal Error is because of Deadline
                error_message = "Fatal Error: " + error_format.format(**result)
                raise RuntimeError(error_message)


def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
    """Loops through all plugins, logs ok and fails into OP DB.

@@ -107,7 +109,7 @@ def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n"
    error_format += "-" * 80 + "\n"

    close_plugin = _get_close_plugin(close_plugin_name, log)
    close_plugin = find_close_plugin(close_plugin_name, log)

    if isinstance(_id, str):
        _id = ObjectId(_id)

@@ -226,16 +228,6 @@ def find_variant_key(application_manager, host):
    return found_variant_key


def _get_close_plugin(close_plugin_name, log):
    if close_plugin_name:
        plugins = pyblish.api.discover()
        for plugin in plugins:
            if plugin.__name__ == close_plugin_name:
                return plugin

    log.debug("Close plugin not found, app might not close.")


def get_task_data(batch_dir):
    """Return parsed data from first task manifest.json
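For orientation, a small sketch of the batch context dict that get_batch_asset_task_info above expects; the values are invented for illustration, and the import path matches what the collector plugins in this diff use:

from openpype_modules.webpublisher.lib import get_batch_asset_task_info

task_ctx = {
    "type": "task",
    "path": "/demo_project/assets/character_a/modeling",
    "name": "modeling",
    "attributes": {"type": "Modeling"},
}
print(get_batch_asset_task_info(task_ctx))
# -> ("character_a", "modeling", "Modeling")

asset_ctx = {"type": "asset", "name": "character_a"}
print(get_batch_asset_task_info(asset_ctx))
# -> ("character_a", None, "default_task_type")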
@@ -13,12 +13,13 @@ import os

import pyblish.api

from openpype.lib.plugin_tools import (
    parse_json,
    get_batch_asset_task_info
)
from openpype.lib.remote_publish import get_webpublish_conn, IN_PROGRESS_STATUS
from openpype.pipeline import legacy_io
from openpype_modules.webpublisher.lib import (
    parse_json,
    get_batch_asset_task_info,
    get_webpublish_conn,
    IN_PROGRESS_STATUS
)


class CollectBatchData(pyblish.api.ContextPlugin):
@@ -23,10 +23,8 @@ from openpype.lib import (
    get_ffprobe_streams,
    convert_ffprobe_fps_value,
)
from openpype.lib.plugin_tools import (
    parse_json,
    get_subset_name_with_asset_doc
)
from openpype.lib.plugin_tools import get_subset_name_with_asset_doc
from openpype_modules.webpublisher.lib import parse_json


class CollectPublishedFiles(pyblish.api.ContextPlugin):
@@ -16,11 +16,11 @@ import uuid
import json
import shutil
import pyblish.api
from openpype.lib.plugin_tools import parse_json
from openpype.hosts.tvpaint.worker import (
    SenderTVPaintCommands,
    CollectSceneData
)
from openpype_modules.webpublisher.lib import parse_json


class CollectTVPaintWorkfileData(pyblish.api.ContextPlugin):
openpype/hosts/webpublisher/publish_functions.py (new file, 205 lines)
@@ -0,0 +1,205 @@
import os
import time
import pyblish.api
import pyblish.util

from openpype.lib import Logger
from openpype.lib.applications import (
    ApplicationManager,
    get_app_environments_for_context,
)
from openpype.pipeline import install_host
from openpype.hosts.webpublisher.api import WebpublisherHost

from .lib import (
    get_batch_asset_task_info,
    get_webpublish_conn,
    start_webpublish_log,
    publish_and_log,
    fail_batch,
    find_variant_key,
    get_task_data,
    get_timeout,
    IN_PROGRESS_STATUS
)


def cli_publish(project_name, batch_path, user_email, targets):
    """Start headless publishing.

    Used to publish rendered assets, workfiles etc. via Webpublisher.
    Eventually should be yanked out to Webpublisher cli.

    Publish uses json from passed paths argument.

    Args:
        project_name (str): project to publish (only single context is
            expected per call of remotepublish)
        batch_path (str): Path to batch folder. Contains subfolders with
            resources (workfile, another subfolder 'renders' etc.)
        user_email (string): email address for webpublisher - used to
            find Ftrack user with same email
        targets (list): Pyblish targets
            (to choose validator for example)

    Raises:
        RuntimeError: When there is no path to process.
    """

    if not batch_path:
        raise RuntimeError("No publish paths specified")

    log = Logger.get_logger("remotepublish")
    log.info("remotepublish command")

    # Register target and host
    webpublisher_host = WebpublisherHost()

    os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
    os.environ["AVALON_PROJECT"] = project_name
    os.environ["AVALON_APP"] = webpublisher_host.name
    os.environ["USER_EMAIL"] = user_email
    os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

    if targets:
        if isinstance(targets, str):
            targets = [targets]
        for target in targets:
            pyblish.api.register_target(target)

    install_host(webpublisher_host)

    log.info("Running publish ...")

    _, batch_id = os.path.split(batch_path)
    dbcon = get_webpublish_conn()
    _id = start_webpublish_log(dbcon, batch_id, user_email)

    task_data = get_task_data(batch_path)
    if not task_data["context"]:
        msg = "Batch manifest must contain context data"
        msg += "Create new batch and set context properly."
        fail_batch(_id, dbcon, msg)

    publish_and_log(dbcon, _id, log, batch_id=batch_id)

    log.info("Publish finished.")


def cli_publish_from_app(
    project_name, batch_path, host_name, user_email, targets
):
    """Opens installed variant of 'host' and runs remote publish there.

    Eventually should be yanked out to Webpublisher cli.

    Currently implemented and tested for Photoshop where customer
    wants to process uploaded .psd file and publish collected layers
    from there. Triggered by Webpublisher.

    Checks if no other batches are running (status == 'in_progress'). If
    so, it sleeps for SLEEP (this is a separate process) and
    waits for WAIT_FOR seconds altogether.

    Requires installed host application on the machine.

    Runs publish process as user would, in automatic fashion.

    Args:
        project_name (str): project to publish (only single context is
            expected per call of remotepublish)
        batch_path (str): Path to batch folder. Contains subfolders with
            resources (workfile, another subfolder 'renders' etc.)
        host_name (str): 'photoshop'
        user_email (string): email address for webpublisher - used to
            find Ftrack user with same email
        targets (list): Pyblish targets
            (to choose validator for example)
    """

    log = Logger.get_logger("RemotePublishFromApp")

    log.info("remotepublishphotoshop command")

    task_data = get_task_data(batch_path)

    workfile_path = os.path.join(batch_path,
                                 task_data["task"],
                                 task_data["files"][0])

    print("workfile_path {}".format(workfile_path))

    batch_id = task_data["batch"]
    dbcon = get_webpublish_conn()
    # safer to start logging here, launch might be broken altogether
    _id = start_webpublish_log(dbcon, batch_id, user_email)

    batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS}))
    if len(batches_in_progress) > 1:
        running_batches = [str(batch["_id"])
                           for batch in batches_in_progress
                           if batch["_id"] != _id]
        msg = "There are still running batches {}\n". \
            format("\n".join(running_batches))
        msg += "Ask admin to check them and reprocess current batch"
        fail_batch(_id, dbcon, msg)

    if not task_data["context"]:
        msg = "Batch manifest must contain context data"
        msg += "Create new batch and set context properly."
        fail_batch(_id, dbcon, msg)

    asset_name, task_name, task_type = get_batch_asset_task_info(
        task_data["context"])

    application_manager = ApplicationManager()
    found_variant_key = find_variant_key(application_manager, host_name)
    app_name = "{}/{}".format(host_name, found_variant_key)

    # must have for proper launch of app
    env = get_app_environments_for_context(
        project_name,
        asset_name,
        task_name,
        app_name
    )
    print("env:: {}".format(env))
    os.environ.update(env)

    os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
    # must pass identifier to update log lines for a batch
    os.environ["BATCH_LOG_ID"] = str(_id)
    os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib
    os.environ["USER_EMAIL"] = user_email

    pyblish.api.register_host(host_name)
    if targets:
        if isinstance(targets, str):
            targets = [targets]
        current_targets = os.environ.get("PYBLISH_TARGETS", "").split(
            os.pathsep)
        for target in targets:
            current_targets.append(target)

        os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
            set(current_targets))

    data = {
        "last_workfile_path": workfile_path,
        "start_last_workfile": True,
        "project_name": project_name,
        "asset_name": asset_name,
        "task_name": task_name
    }

    launched_app = application_manager.launch(app_name, **data)

    timeout = get_timeout(project_name, host_name, task_type)

    time_start = time.time()
    while launched_app.poll() is None:
        time.sleep(0.5)
        if time.time() - time_start > timeout:
            launched_app.terminate()
            msg = "Timeout reached"
            fail_batch(_id, dbcon, msg)
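cli_publish_from_app above only reads a handful of keys from the parsed task manifest (batch, task, files, context). A hypothetical minimal manifest, shown as the dict get_task_data would return; every value is invented for illustration:

# Hypothetical parsed manifest.json content
task_data = {
    "batch": "batch_20220815_001",
    "task": "task_01",
    "files": ["character_a.psd"],
    "context": {
        "type": "task",
        "path": "/demo_project/assets/character_a/art",
        "name": "art",
        "attributes": {"type": "Art"},
    },
}

# The workfile is then resolved exactly as in the function above:
# os.path.join(batch_path, task_data["task"], task_data["files"][0])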
@@ -0,0 +1,6 @@
from .webserver import run_webserver


__all__ = (
    "run_webserver",
)
@@ -10,16 +10,17 @@ from aiohttp.web_response import Response
from openpype.client import (
    get_projects,
    get_assets,
    OpenPypeMongoConnection,
)
from openpype.lib import Logger
from openpype.lib.remote_publish import (
from openpype.settings import get_project_settings
from openpype_modules.webserver.base_routes import RestApiEndpoint
from openpype_modules.webpublisher import WebpublisherAddon
from openpype_modules.webpublisher.lib import (
    get_webpublish_conn,
    get_task_data,
    ERROR_STATUS,
    REPROCESS_STATUS
)
from openpype.settings import get_project_settings
from openpype_modules.webserver.base_routes import RestApiEndpoint

log = Logger.get_logger("WebpublishRoutes")

@@ -77,9 +78,7 @@ class WebpublishRestApiResource(JsonApiResource):
    """Resource carrying OP DB connection for storing batch info into DB."""

    def __init__(self):
        mongo_client = OpenPypeMongoConnection.get_mongo_client()
        database_name = os.environ["OPENPYPE_DATABASE_NAME"]
        self.dbcon = mongo_client[database_name]["webpublishes"]
        self.dbcon = get_webpublish_conn()


class ProjectsEndpoint(ResourceRestApiEndpoint):

@@ -215,7 +214,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
            # TVPaint filter
            {
                "extensions": [".tvpp"],
                "command": "remotepublish",
                "command": "publish",
                "arguments": {
                    "targets": ["tvpaint_worker"]
                },

@@ -224,13 +223,13 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
            # Photoshop filter
            {
                "extensions": [".psd", ".psb"],
                "command": "remotepublishfromapp",
                "command": "publishfromapp",
                "arguments": {
                    # Command 'remotepublishfromapp' requires --host argument
                    # Command 'publishfromapp' requires --host argument
                    "host": "photoshop",
                    # Make sure targets are set to None for cases that default
                    # would change
                    # - targets argument is not used in 'remotepublishfromapp'
                    # - targets argument is not used in 'publishfromapp'
                    "targets": ["remotepublish"]
                },
                # does publish need to be handled by a queue, eg. only

@@ -242,7 +241,7 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):
        batch_dir = os.path.join(self.resource.upload_dir, content["batch"])

        # Default command and arguments
        command = "remotepublish"
        command = "publish"
        add_args = {
            # All commands need 'project' and 'user'
            "project": content["project_name"],

@@ -273,6 +272,8 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):

        args = [
            openpype_app,
            "module",
            WebpublisherAddon.name,
            command,
            batch_dir
        ]
@@ -7,8 +7,15 @@ import json
import subprocess

from openpype.client import OpenPypeMongoConnection
from openpype.modules import ModulesManager
from openpype.lib import Logger

from openpype_modules.webpublisher.lib import (
    ERROR_STATUS,
    REPROCESS_STATUS,
    SENT_REPROCESSING_STATUS
)

from .webpublish_routes import (
    RestApiResource,
    WebpublishRestApiResource,

@@ -21,32 +28,29 @@ from .webpublish_routes import (
    TaskPublishEndpoint,
    UserReportEndpoint
)
from openpype.lib.remote_publish import (
    ERROR_STATUS,
    REPROCESS_STATUS,
    SENT_REPROCESSING_STATUS
)


log = Logger.get_logger("webserver_gui")


def run_webserver(*args, **kwargs):
def run_webserver(executable, upload_dir, host=None, port=None):
    """Runs webserver in command line, adds routes."""
    from openpype.modules import ModulesManager

    if not host:
        host = "localhost"
    if not port:
        port = 8079

    manager = ModulesManager()
    webserver_module = manager.modules_by_name["webserver"]
    host = kwargs.get("host") or "localhost"
    port = kwargs.get("port") or 8079

    server_manager = webserver_module.create_new_server_manager(port, host)
    webserver_url = server_manager.url
    # queue for remotepublishfromapp tasks
    studio_task_queue = collections.deque()

    resource = RestApiResource(server_manager,
                               upload_dir=kwargs["upload_dir"],
                               executable=kwargs["executable"],
                               upload_dir=upload_dir,
                               executable=executable,
                               studio_task_queue=studio_task_queue)
    projects_endpoint = ProjectsEndpoint(resource)
    server_manager.add_route(

@@ -111,7 +115,7 @@ def run_webserver(*args, **kwargs):
    last_reprocessed = time.time()
    while True:
        if time.time() - last_reprocessed > 20:
            reprocess_failed(kwargs["upload_dir"], webserver_url)
            reprocess_failed(upload_dir, webserver_url)
            last_reprocessed = time.time()
        if studio_task_queue:
            args = studio_task_queue.popleft()
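run_webserver now takes explicit arguments instead of *args/**kwargs, matching the webserver click command in addon.py. A minimal sketch of a direct call; the executable and upload paths are placeholders, and host/port fall back to "localhost"/8079 when omitted:

from openpype.hosts.webpublisher.webserver_service import run_webserver

run_webserver(
    executable="/opt/openpype/openpype_console",  # placeholder path
    upload_dir="/mnt/webpublish/uploads",         # placeholder path
)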
@@ -427,48 +427,3 @@ def get_background_layers(file_url):
                      layer.get("filename")).
                      replace("\\", "/"))
    return layers


def parse_json(path):
    """Parses json file at 'path' location

    Returns:
        (dict) or None if unparsable
    Raises:
        AssertionError if 'path' doesn't exist
    """
    path = path.strip('\"')
    assert os.path.isfile(path), (
        "Path to json file doesn't exist. \"{}\"".format(path)
    )
    data = None
    with open(path, "r") as json_file:
        try:
            data = json.load(json_file)
        except Exception as exc:
            log.error(
                "Error loading json: "
                "{} - Exception: {}".format(path, exc)
            )
    return data


def get_batch_asset_task_info(ctx):
    """Parses context data from webpublisher's batch metadata

    Returns:
        (tuple): asset, task_name (Optional), task_type
    """
    task_type = "default_task_type"
    task_name = None
    asset = None

    if ctx["type"] == "task":
        items = ctx["path"].split('/')
        asset = items[-2]
        task_name = ctx["name"]
        task_type = ctx["attributes"]["type"]
    else:
        asset = ctx["name"]

    return asset, task_name, task_type
@@ -273,3 +273,43 @@ def filter_pyblish_plugins(plugins):
                option, value, plugin.__name__))

            setattr(plugin, option, value)


def find_close_plugin(close_plugin_name, log):
    if close_plugin_name:
        plugins = pyblish.api.discover()
        for plugin in plugins:
            if plugin.__name__ == close_plugin_name:
                return plugin

    log.debug("Close plugin not found, app might not close.")


def remote_publish(log, close_plugin_name=None, raise_error=False):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
        log (openpype.lib.Logger)
        close_plugin_name (str): name of plugin with responsibility to
            close host app
    """
    # Error exit as soon as any error occurs.
    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"

    close_plugin = find_close_plugin(close_plugin_name, log)

    for result in pyblish.util.publish_iter():
        for record in result["records"]:
            log.info("{}: {}".format(
                result["plugin"].label, record.msg))

        if result["error"]:
            error_message = error_format.format(**result)
            log.error(error_message)
            if close_plugin:  # close host app explicitly after error
                context = pyblish.api.Context()
                close_plugin().process(context)
            if raise_error:
                # Fatal Error is because of Deadline
                error_message = "Fatal Error: " + error_format.format(**result)
                raise RuntimeError(error_message)
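remote_publish looks the close plugin up purely by class name via find_close_plugin and, after a failed result, calls close_plugin().process(context). A hedged sketch of what such a plugin could look like; the class body and order value are illustrative, only the "CloseAE" name is taken from the launch scripts above:

import pyblish.api


class CloseAE(pyblish.api.ContextPlugin):
    """Illustrative close plugin, discovered by its class name."""

    order = pyblish.api.IntegratorOrder + 10
    label = "Close AfterEffects"

    def process(self, context):
        # A real implementation would ask the host integration to close
        # the running AfterEffects instance here.
        self.log.info("Closing host application")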
@@ -5,19 +5,6 @@ import sys
import json
import time

from openpype.api import get_app_environments_for_context
from openpype.lib.plugin_tools import get_batch_asset_task_info
from openpype.lib.remote_publish import (
    get_webpublish_conn,
    start_webpublish_log,
    publish_and_log,
    fail_batch,
    find_variant_key,
    get_task_data,
    get_timeout,
    IN_PROGRESS_STATUS
)


class PypeCommands:
    """Class implementing commands used by Pype.

@@ -74,8 +61,8 @@ class PypeCommands:

    @staticmethod
    def launch_webpublisher_webservercli(*args, **kwargs):
        from openpype.hosts.webpublisher.webserver_service.webserver_cli \
            import (run_webserver)
        from openpype.hosts.webpublisher.webserver_service import run_webserver

        return run_webserver(*args, **kwargs)

    @staticmethod

@@ -100,6 +87,7 @@ class PypeCommands:
        """

        from openpype.lib import Logger
        from openpype.lib.applications import get_app_environments_for_context
        from openpype.modules import ModulesManager
        from openpype.pipeline import install_openpype_plugins
        from openpype.tools.utils.host_tools import show_publish

@@ -198,96 +186,13 @@ class PypeCommands:
            (to choose validator for example)
        """

        import pyblish.api
        from openpype.lib import ApplicationManager

        from openpype.lib import Logger
        log = Logger.get_logger("CLI-remotepublishfromapp")

        log.info("remotepublishphotoshop command")

        task_data = get_task_data(batch_path)

        workfile_path = os.path.join(batch_path,
                                     task_data["task"],
                                     task_data["files"][0])

        print("workfile_path {}".format(workfile_path))

        batch_id = task_data["batch"]
        dbcon = get_webpublish_conn()
        # safer to start logging here, launch might be broken altogether
        _id = start_webpublish_log(dbcon, batch_id, user_email)

        batches_in_progress = list(dbcon.find({"status": IN_PROGRESS_STATUS}))
        if len(batches_in_progress) > 1:
            running_batches = [str(batch["_id"])
                               for batch in batches_in_progress
                               if batch["_id"] != _id]
            msg = "There are still running batches {}\n". \
                format("\n".join(running_batches))
            msg += "Ask admin to check them and reprocess current batch"
            fail_batch(_id, dbcon, msg)

        if not task_data["context"]:
            msg = "Batch manifest must contain context data"
            msg += "Create new batch and set context properly."
            fail_batch(_id, dbcon, msg)

        asset_name, task_name, task_type = get_batch_asset_task_info(
            task_data["context"])

        application_manager = ApplicationManager()
        found_variant_key = find_variant_key(application_manager, host_name)
        app_name = "{}/{}".format(host_name, found_variant_key)

        # must have for proper launch of app
        env = get_app_environments_for_context(
            project_name,
            asset_name,
            task_name,
            app_name
        from openpype.hosts.webpublisher.cli_functions import (
            cli_publish_from_app
        )
        print("env:: {}".format(env))
        os.environ.update(env)

        os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
        # must pass identifier to update log lines for a batch
        os.environ["BATCH_LOG_ID"] = str(_id)
        os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib
        os.environ["USER_EMAIL"] = user_email

        pyblish.api.register_host(host_name)
        if targets:
            if isinstance(targets, str):
                targets = [targets]
            current_targets = os.environ.get("PYBLISH_TARGETS", "").split(
                os.pathsep)
            for target in targets:
                current_targets.append(target)

            os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
                set(current_targets))

        data = {
            "last_workfile_path": workfile_path,
            "start_last_workfile": True,
            "project_name": project_name,
            "asset_name": asset_name,
            "task_name": task_name
        }

        launched_app = application_manager.launch(app_name, **data)

        timeout = get_timeout(project_name, host_name, task_type)

        time_start = time.time()
        while launched_app.poll() is None:
            time.sleep(0.5)
            if time.time() - time_start > timeout:
                launched_app.terminate()
                msg = "Timeout reached"
                fail_batch(_id, dbcon, msg)
        cli_publish_from_app(
            project_name, batch_path, host_name, user_email, targets
        )

    @staticmethod
    def remotepublish(project, batch_path, user_email, targets=None):

@@ -311,53 +216,12 @@ class PypeCommands:
        Raises:
            RuntimeError: When there is no path to process.
        """
        if not batch_path:
            raise RuntimeError("No publish paths specified")

        # Register target and host
        import pyblish.api
        import pyblish.util
        from openpype.hosts.webpublisher.cli_functions import (
            cli_publish
        )

        from openpype.lib import Logger
        from openpype.pipeline import install_host
        from openpype.hosts.webpublisher import api as webpublisher

        log = Logger.get_logger("remotepublish")

        log.info("remotepublish command")

        host_name = "webpublisher"
        os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
        os.environ["AVALON_PROJECT"] = project
        os.environ["AVALON_APP"] = host_name
        os.environ["USER_EMAIL"] = user_email
        os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

        pyblish.api.register_host(host_name)

        if targets:
            if isinstance(targets, str):
                targets = [targets]
            for target in targets:
                pyblish.api.register_target(target)

        install_host(webpublisher)

        log.info("Running publish ...")

        _, batch_id = os.path.split(batch_path)
        dbcon = get_webpublish_conn()
        _id = start_webpublish_log(dbcon, batch_id, user_email)

        task_data = get_task_data(batch_path)
        if not task_data["context"]:
            msg = "Batch manifest must contain context data"
            msg += "Create new batch and set context properly."
            fail_batch(_id, dbcon, msg)

        publish_and_log(dbcon, _id, log, batch_id=batch_id)

        log.info("Publish finished.")
        cli_publish(project, batch_path, user_email, targets)

    @staticmethod
    def extractenvironments(output_json_path, project, asset, task, app,

@@ -366,8 +230,10 @@ class PypeCommands:

        Called by Deadline plugin to propagate environment into render jobs.
        """

        from openpype.lib.applications import get_app_environments_for_context

        if all((project, asset, task, app)):
            from openpype.api import get_app_environments_for_context
            env = get_app_environments_for_context(
                project, asset, task, app, env_group
            )

@@ -469,7 +335,6 @@ class PypeCommands:
        sync_server_module.server_init()
        sync_server_module.server_start()

        import time
        while True:
            time.sleep(1.0)
@@ -1,11 +1,12 @@
try:
    from openpype.api import Logger
    import openpype.lib.remote_publish
    from openpype.lib import Logger
    from openpype.pipeline.publish.lib import remote_publish
except ImportError as exc:
    # Ensure Deadline fails by outputting an error that contains "Fatal Error:"
    raise ImportError("Fatal Error: %s" % exc)


if __name__ == "__main__":
    # Perform remote publish with thorough error checking
    log = Logger.get_logger(__name__)
    openpype.lib.remote_publish.publish(log, raise_error=True)
    remote_publish(log, raise_error=True)