Merge pull request #1876 from pypeclub/feature/webpublisher_backend

Feature/webpublisher backend
This commit is contained in:
Petr Kalis 2021-08-27 14:20:02 +02:00 committed by GitHub
commit bf1dad45ff
21 changed files with 1305 additions and 14 deletions

View file

@ -94,6 +94,31 @@ def eventserver(debug,
)
@main.command()
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host", default=None)
@click.option("-p", "--port", help="Port", default=None)
@click.option("-e", "--executable", help="Executable")
@click.option("-u", "--upload_dir", help="Upload dir")
def webpublisherwebserver(debug, executable, upload_dir, host=None, port=None):
"""Starts webserver for communication with Webpublish FR via command line
OP must be congigured on a machine, eg. OPENPYPE_MONGO filled AND
FTRACK_BOT_API_KEY provided with api key from Ftrack.
Expect "pype.club" user created on Ftrack.
"""
if debug:
os.environ['OPENPYPE_DEBUG'] = "3"
PypeCommands().launch_webpublisher_webservercli(
upload_dir=upload_dir,
executable=executable,
host=host,
port=port
)
@main.command()
@click.argument("output_json_path")
@click.option("--project", help="Project name", default=None)
@ -131,6 +156,25 @@ def publish(debug, paths, targets):
PypeCommands.publish(list(paths), targets)
@main.command()
@click.argument("path")
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-h", "--host", help="Host")
@click.option("-u", "--user", help="User email address")
@click.option("-p", "--project", help="Project")
@click.option("-t", "--targets", help="Targets", default=None,
multiple=True)
def remotepublish(debug, project, path, host, targets=None, user=None):
"""Start CLI publishing.
Publish collects json from paths provided as an argument.
More than one path is allowed.
"""
if debug:
os.environ['OPENPYPE_DEBUG'] = '3'
PypeCommands.remotepublish(project, path, host, user, targets=targets)
@main.command()
@click.option("-d", "--debug", is_flag=True, help="Print debug messages")
@click.option("-p", "--project", required=True,

View file

@ -0,0 +1,6 @@
Webpublisher
-------------
Plugins meant for processing Webpublisher submissions.
They are triggered by calling openpype.cli.remotepublish with the appropriate arguments, as in the sketch below.
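A hedged sketch of the call that drives these plugins, mirroring the argument list assembled by the batch-publish endpoint later in this diff; the executable path, batch folder, project name, and email are placeholders:

```python
import subprocess

# Illustrative invocation of openpype.cli.remotepublish via the console
# executable; all concrete values below are placeholders.
subprocess.call([
    "/opt/openpype/openpype_console", "remotepublish",
    "/shared/uploads/a_batch_folder_name",   # batch folder with manifest.json
    "--host", "webpublisher",
    "--project", "my_project",
    "--user", "artist@example.com",
])
```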

View file

View file

@ -0,0 +1,43 @@
import os
import logging
from avalon import api as avalon
from avalon import io
from pyblish import api as pyblish
import openpype.hosts.webpublisher
log = logging.getLogger("openpype.hosts.webpublisher")
HOST_DIR = os.path.dirname(os.path.abspath(
openpype.hosts.webpublisher.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
def application_launch():
pass
def install():
print("Installing Pype config...")
pyblish.register_plugin_path(PUBLISH_PATH)
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
log.info(PUBLISH_PATH)
io.install()
avalon.on("application.launched", application_launch)
def uninstall():
pyblish.deregister_plugin_path(PUBLISH_PATH)
avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
# to have required methods for interface
def ls():
pass

View file

@ -0,0 +1,28 @@
"""
Requires:
Nothing
Provides:
Instance
"""
import pyblish.api
from pprint import pformat
class CollectFPS(pyblish.api.InstancePlugin):
"""
Adds fps from the context to the instance, as required by ExtractReview.
"""
label = "Collect fps"
order = pyblish.api.CollectorOrder + 0.49
hosts = ["webpublisher"]
def process(self, instance):
fps = instance.context.data["fps"]
instance.data.update({
"fps": fps
})
self.log.debug(f"instance.data: {pformat(instance.data)}")

View file

@ -0,0 +1,267 @@
"""Loads publishing context from json and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
Provides:
context, instances -> All data from previous publishing process.
"""
import os
import json
import clique
import pyblish.api
from avalon import io
from openpype.lib import prepare_template_data
class CollectPublishedFiles(pyblish.api.ContextPlugin):
"""
This collector tries to find JSON manifest files in the folder provided
by `OPENPYPE_PUBLISH_DATA`. Those files _MUST_ share the same context.
"""
# must be really early, context values are only in json file
order = pyblish.api.CollectorOrder - 0.490
label = "Collect rendered frames"
hosts = ["webpublisher"]
_context = None
# from Settings
task_type_to_family = {}
def _load_json(self, path):
path = path.strip('\"')
assert os.path.isfile(path), (
"Path to json file doesn't exist. \"{}\"".format(path)
)
data = None
with open(path, "r") as json_file:
try:
data = json.load(json_file)
except Exception as exc:
self.log.error(
"Error loading json: "
"{} - Exception: {}".format(path, exc)
)
return data
def _process_batch(self, dir_url):
task_subfolders = [
os.path.join(dir_url, o)
for o in os.listdir(dir_url)
if os.path.isdir(os.path.join(dir_url, o))]
self.log.info("task_sub:: {}".format(task_subfolders))
for task_dir in task_subfolders:
task_data = self._load_json(os.path.join(task_dir,
"manifest.json"))
self.log.info("task_data:: {}".format(task_data))
ctx = task_data["context"]
task_type = "default_task_type"
task_name = None
if ctx["type"] == "task":
items = ctx["path"].split('/')
asset = items[-2]
os.environ["AVALON_TASK"] = ctx["name"]
task_name = ctx["name"]
task_type = ctx["attributes"]["type"]
else:
asset = ctx["name"]
os.environ["AVALON_TASK"] = ""
is_sequence = len(task_data["files"]) > 1
_, extension = os.path.splitext(task_data["files"][0])
family, families, subset_template, tags = self._get_family(
self.task_type_to_family,
task_type,
is_sequence,
extension.replace(".", ''))
subset = self._get_subset_name(family, subset_template, task_name,
task_data["variant"])
os.environ["AVALON_ASSET"] = asset
io.Session["AVALON_ASSET"] = asset
instance = self._context.create_instance(subset)
instance.data["asset"] = asset
instance.data["subset"] = subset
instance.data["family"] = family
instance.data["families"] = families
instance.data["version"] = \
self._get_last_version(asset, subset) + 1
instance.data["stagingDir"] = task_dir
instance.data["source"] = "webpublisher"
# to store logging info into DB openpype.webpublishes
instance.data["ctx_path"] = ctx["path"]
instance.data["batch_id"] = task_data["batch"]
# to convert from email provided into Ftrack username
instance.data["user_email"] = task_data["user"]
if is_sequence:
instance.data["representations"] = self._process_sequence(
task_data["files"], task_dir, tags
)
instance.data["frameStart"] = \
instance.data["representations"][0]["frameStart"]
instance.data["frameEnd"] = \
instance.data["representations"][0]["frameEnd"]
else:
instance.data["representations"] = self._get_single_repre(
task_dir, task_data["files"], tags
)
self.log.info("instance.data:: {}".format(instance.data))
def _get_subset_name(self, family, subset_template, task_name, variant):
fill_pairs = {
"variant": variant,
"family": family,
"task": task_name
}
subset = subset_template.format(**prepare_template_data(fill_pairs))
return subset
def _get_single_repre(self, task_dir, files, tags):
_, ext = os.path.splitext(files[0])
repre_data = {
"name": ext[1:],
"ext": ext[1:],
"files": files[0],
"stagingDir": task_dir,
"tags": tags
}
self.log.info("single file repre_data.data:: {}".format(repre_data))
return [repre_data]
def _process_sequence(self, files, task_dir, tags):
"""Prepare reprentations for sequence of files."""
collections, remainder = clique.assemble(files)
assert len(collections) == 1, \
"Too many collections in {}".format(files)
frame_start = list(collections[0].indexes)[0]
frame_end = list(collections[0].indexes)[-1]
ext = collections[0].tail
repre_data = {
"frameStart": frame_start,
"frameEnd": frame_end,
"name": ext[1:],
"ext": ext[1:],
"files": files,
"stagingDir": task_dir,
"tags": tags
}
self.log.info("sequences repre_data.data:: {}".format(repre_data))
return [repre_data]
def _get_family(self, settings, task_type, is_sequence, extension):
"""Guess family based on input data.
Args:
settings (dict): configuration per task_type
task_type (str): Animation|Art etc
is_sequence (bool): single file or sequence
extension (str): without '.'
Returns:
(family, [families], subset_template_name, tags) tuple
Raises AssertionError if no matching family is found.
"""
task_obj = settings.get(task_type)
assert task_obj, "No family configuration for '{}'".format(task_type)
found_family = None
for family, content in task_obj.items():
if is_sequence != content["is_sequence"]:
continue
if extension in content["extensions"] or \
'' in content["extensions"]: # all extensions setting
found_family = family
break
msg = "No family found for combination of " +\
"task_type: {}, is_sequence:{}, extension: {}".format(
task_type, is_sequence, extension)
assert found_family, msg
return found_family, \
content["families"], \
content["subset_template_name"], \
content["tags"]
def _get_last_version(self, asset_name, subset_name):
"""Returns version number or 0 for 'asset' and 'subset'"""
query = [
{
"$match": {"type": "asset", "name": asset_name}
},
{
"$lookup":
{
"from": os.environ["AVALON_PROJECT"],
"localField": "_id",
"foreignField": "parent",
"as": "subsets"
}
},
{
"$unwind": "$subsets"
},
{
"$match": {"subsets.type": "subset",
"subsets.name": subset_name}},
{
"$lookup":
{
"from": os.environ["AVALON_PROJECT"],
"localField": "subsets._id",
"foreignField": "parent",
"as": "versions"
}
},
{
"$unwind": "$versions"
},
{
"$group": {
"_id": {
"asset_name": "$name",
"subset_name": "$subsets.name"
},
'version': {'$max': "$versions.name"}
}
}
]
version = list(io.aggregate(query))
if version:
return version[0].get("version") or 0
else:
return 0
def process(self, context):
self._context = context
batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
assert batch_dir, (
"Missing `OPENPYPE_PUBLISH_DATA`")
assert os.path.exists(batch_dir), \
"Folder {} doesn't exist".format(batch_dir)
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
"Could not set project `root` which may cause issues."
)
self._process_batch(batch_dir)
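The subset name above is filled from a template configured per task type (see the webpublisher settings later in this diff). A minimal, self-contained sketch of that resolution; `fill_template` is a hypothetical stand-in for `openpype.lib.prepare_template_data`, whose exact behavior is assumed here:

```python
# Minimal sketch of the subset-name resolution used above; `fill_template`
# is a hypothetical stand-in for openpype.lib.prepare_template_data.
def fill_template(fill_pairs):
    """Return fill data with lower-cased and capitalized variants of each key."""
    data = {}
    for key, value in fill_pairs.items():
        value = value or ""
        data[key] = value
        data[key.capitalize()] = value.capitalize()
    return data


def get_subset_name(family, subset_template, task_name, variant):
    # Mirrors CollectPublishedFiles._get_subset_name
    fill_pairs = {"variant": variant, "family": family, "task": task_name}
    return subset_template.format(**fill_template(fill_pairs))


if __name__ == "__main__":
    # "{family}{Variant}" is the template shipped for the default task type
    # in the settings later in this diff.
    print(get_subset_name("render", "{family}{Variant}", "animation", "main"))
    # -> "renderMain"
```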

View file

@ -0,0 +1,38 @@
import os
import pyblish.api
from openpype.lib import OpenPypeMongoConnection
class IntegrateContextToLog(pyblish.api.ContextPlugin):
""" Adds context information to log document for displaying in front end"""
label = "Integrate Context to Log"
order = pyblish.api.IntegratorOrder - 0.1
hosts = ["webpublisher"]
def process(self, context):
self.log.info("Integrate Context to Log")
mongo_client = OpenPypeMongoConnection.get_mongo_client()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
dbcon = mongo_client[database_name]["webpublishes"]
for instance in context:
self.log.info("ctx_path: {}".format(instance.data.get("ctx_path")))
self.log.info("batch_id: {}".format(instance.data.get("batch_id")))
if instance.data.get("ctx_path") and instance.data.get("batch_id"):
self.log.info("Updating log record")
dbcon.update_one(
{
"batch_id": instance.data.get("batch_id"),
"status": "in_progress"
},
{"$set":
{
"path": instance.data.get("ctx_path")
}}
)
return

View file

@ -0,0 +1,247 @@
"""Routes and etc. for webpublisher API."""
import os
import json
import datetime
from bson.objectid import ObjectId
import collections
from aiohttp.web_response import Response
import subprocess
from avalon.api import AvalonMongoDB
from openpype.lib import OpenPypeMongoConnection
from openpype_modules.avalon_apps.rest_api import _RestApiEndpoint
from openpype.lib import PypeLogger
log = PypeLogger.get_logger("WebServer")
class RestApiResource:
"""Resource carrying needed info and Avalon DB connection for publish."""
def __init__(self, server_manager, executable, upload_dir):
self.server_manager = server_manager
self.upload_dir = upload_dir
self.executable = executable
self.dbcon = AvalonMongoDB()
self.dbcon.install()
@staticmethod
def json_dump_handler(value):
if isinstance(value, datetime.datetime):
return value.isoformat()
if isinstance(value, ObjectId):
return str(value)
raise TypeError(value)
@classmethod
def encode(cls, data):
return json.dumps(
data,
indent=4,
default=cls.json_dump_handler
).encode("utf-8")
class OpenPypeRestApiResource(RestApiResource):
"""Resource carrying OP DB connection for storing batch info into DB."""
def __init__(self, ):
mongo_client = OpenPypeMongoConnection.get_mongo_client()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
self.dbcon = mongo_client[database_name]["webpublishes"]
class WebpublisherProjectsEndpoint(_RestApiEndpoint):
"""Returns list of dict with project info (id, name)."""
async def get(self) -> Response:
output = []
for project_name in self.dbcon.database.collection_names():
project_doc = self.dbcon.database[project_name].find_one({
"type": "project"
})
if project_doc:
ret_val = {
"id": project_doc["_id"],
"name": project_doc["name"]
}
output.append(ret_val)
return Response(
status=200,
body=self.resource.encode(output),
content_type="application/json"
)
class WebpublisherHiearchyEndpoint(_RestApiEndpoint):
"""Returns dictionary with context tree from assets."""
async def get(self, project_name) -> Response:
query_projection = {
"_id": 1,
"data.tasks": 1,
"data.visualParent": 1,
"data.entityType": 1,
"name": 1,
"type": 1,
}
asset_docs = self.dbcon.database[project_name].find(
{"type": "asset"},
query_projection
)
asset_docs_by_id = {
asset_doc["_id"]: asset_doc
for asset_doc in asset_docs
}
asset_docs_by_parent_id = collections.defaultdict(list)
for asset_doc in asset_docs_by_id.values():
parent_id = asset_doc["data"].get("visualParent")
asset_docs_by_parent_id[parent_id].append(asset_doc)
assets = collections.defaultdict(list)
for parent_id, children in asset_docs_by_parent_id.items():
for child in children:
node = assets.get(child["_id"])
if not node:
node = Node(child["_id"],
child["data"].get("entityType", "Folder"),
child["name"])
assets[child["_id"]] = node
tasks = child["data"].get("tasks", {})
for t_name, t_con in tasks.items():
task_node = TaskNode("task", t_name)
task_node["attributes"]["type"] = t_con.get("type")
task_node.parent = node
parent_node = assets.get(parent_id)
if not parent_node:
asset_doc = asset_docs_by_id.get(parent_id)
if asset_doc: # regular node
parent_node = Node(parent_id,
asset_doc["data"].get("entityType",
"Folder"),
asset_doc["name"])
else: # root
parent_node = Node(parent_id,
"project",
project_name)
assets[parent_id] = parent_node
node.parent = parent_node
roots = [x for x in assets.values() if x.parent is None]
return Response(
status=200,
body=self.resource.encode(roots[0]),
content_type="application/json"
)
class Node(dict):
"""Node element in context tree."""
def __init__(self, uid, node_type, name):
self._parent = None # pointer to parent Node
self["type"] = node_type
self["name"] = name
self['id'] = uid # keep reference to id #
self['children'] = [] # collection of pointers to child Nodes
@property
def parent(self):
return self._parent # simply return the object at the _parent pointer
@parent.setter
def parent(self, node):
self._parent = node
# add this node to parent's list of children
node['children'].append(self)
class TaskNode(Node):
"""Special node type only for Tasks."""
def __init__(self, node_type, name):
self._parent = None
self["type"] = node_type
self["name"] = name
self["attributes"] = {}
class WebpublisherBatchPublishEndpoint(_RestApiEndpoint):
"""Triggers headless publishing of batch."""
async def post(self, request) -> Response:
output = {}
log.info("WebpublisherBatchPublishEndpoint called")
content = await request.json()
batch_path = os.path.join(self.resource.upload_dir,
content["batch"])
openpype_app = self.resource.executable
args = [
openpype_app,
'remotepublish',
batch_path
]
if not openpype_app or not os.path.exists(openpype_app):
msg = "Non existent OpenPype executable {}".format(openpype_app)
raise RuntimeError(msg)
add_args = {
"host": "webpublisher",
"project": content["project_name"],
"user": content["user"]
}
for key, value in add_args.items():
args.append("--{}".format(key))
args.append(value)
log.info("args:: {}".format(args))
subprocess.call(args)
return Response(
status=200,
body=self.resource.encode(output),
content_type="application/json"
)
class WebpublisherTaskPublishEndpoint(_RestApiEndpoint):
"""Prepared endpoint triggered after each task - for future development."""
async def post(self, request) -> Response:
return Response(
status=200,
body=self.resource.encode([]),
content_type="application/json"
)
class BatchStatusEndpoint(_RestApiEndpoint):
"""Returns dict with info for batch_id."""
async def get(self, batch_id) -> Response:
output = self.dbcon.find_one({"batch_id": batch_id})
return Response(
status=200,
body=self.resource.encode(output),
content_type="application/json"
)
class PublishesStatusEndpoint(_RestApiEndpoint):
"""Returns list of dict with batch info for user (email address)."""
async def get(self, user) -> Response:
output = list(self.dbcon.find({"user": user}))
return Response(
status=200,
body=self.resource.encode(output),
content_type="application/json"
)
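These routes are registered by the webserver CLI in the next file. A hedged example of how a front end might exercise them once the server is running; the host, port, and payload values are illustrative, while the JSON field names (`batch`, `project_name`, `user`) come from the endpoint code above, and the batch id appears to match the uploaded batch folder name:

```python
import requests

BASE = "http://localhost:8079"  # default host/port used by the webserver CLI below

# List projects available for publishing.
projects = requests.get("{}/api/projects".format(BASE)).json()

# Trigger headless publishing of an uploaded batch folder.
payload = {
    "batch": "a_batch_folder_name",    # subfolder of the configured upload dir
    "project_name": "my_project",      # illustrative project name
    "user": "artist@example.com",      # email used to resolve the Ftrack user
}
requests.post("{}/api/webpublish/batch".format(BASE), json=payload)

# Poll the state of that batch afterwards.
status = requests.get("{}/api/batch_status/a_batch_folder_name".format(BASE)).json()
print(status)
```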

View file

@ -0,0 +1,141 @@
import time
import os
from datetime import datetime
import requests
import json
from openpype.lib import PypeLogger
from .webpublish_routes import (
RestApiResource,
OpenPypeRestApiResource,
WebpublisherBatchPublishEndpoint,
WebpublisherTaskPublishEndpoint,
WebpublisherHiearchyEndpoint,
WebpublisherProjectsEndpoint,
BatchStatusEndpoint,
PublishesStatusEndpoint
)
log = PypeLogger().get_logger("webserver_gui")
def run_webserver(*args, **kwargs):
"""Runs webserver in command line, adds routes."""
from openpype.modules import ModulesManager
manager = ModulesManager()
webserver_module = manager.modules_by_name["webserver"]
host = kwargs.get("host") or "localhost"
port = kwargs.get("port") or 8079
server_manager = webserver_module.create_new_server_manager(port, host)
webserver_url = server_manager.url
resource = RestApiResource(server_manager,
upload_dir=kwargs["upload_dir"],
executable=kwargs["executable"])
projects_endpoint = WebpublisherProjectsEndpoint(resource)
server_manager.add_route(
"GET",
"/api/projects",
projects_endpoint.dispatch
)
hiearchy_endpoint = WebpublisherHiearchyEndpoint(resource)
server_manager.add_route(
"GET",
"/api/hierarchy/{project_name}",
hiearchy_endpoint.dispatch
)
# triggers publish
webpublisher_batch_publish_endpoint = \
WebpublisherBatchPublishEndpoint(resource)
server_manager.add_route(
"POST",
"/api/webpublish/batch",
webpublisher_batch_publish_endpoint.dispatch
)
webpublisher_task_publish_endpoint = \
WebpublisherTaskPublishEndpoint(resource)
server_manager.add_route(
"POST",
"/api/webpublish/task",
webpublisher_task_publish_endpoint.dispatch
)
# reporting
openpype_resource = OpenPypeRestApiResource()
batch_status_endpoint = BatchStatusEndpoint(openpype_resource)
server_manager.add_route(
"GET",
"/api/batch_status/{batch_id}",
batch_status_endpoint.dispatch
)
user_status_endpoint = PublishesStatusEndpoint(openpype_resource)
server_manager.add_route(
"GET",
"/api/publishes/{user}",
user_status_endpoint.dispatch
)
server_manager.start_server()
last_reprocessed = time.time()
while True:
if time.time() - last_reprocessed > 20:
reprocess_failed(kwargs["upload_dir"], webserver_url)
last_reprocessed = time.time()
time.sleep(1.0)
def reprocess_failed(upload_dir, webserver_url):
# log.info("check_reprocesable_records")
from openpype.lib import OpenPypeMongoConnection
mongo_client = OpenPypeMongoConnection.get_mongo_client()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
dbcon = mongo_client[database_name]["webpublishes"]
results = dbcon.find({"status": "reprocess"})
for batch in results:
batch_url = os.path.join(upload_dir,
batch["batch_id"],
"manifest.json")
log.info("batch:: {} {}".format(os.path.exists(batch_url), batch_url))
if not os.path.exists(batch_url):
msg = "Manifest {} not found".format(batch_url)
print(msg)
dbcon.update_one(
{"_id": batch["_id"]},
{"$set":
{
"finish_date": datetime.now(),
"status": "error",
"progress": 1,
"log": batch.get("log") + msg
}}
)
continue
server_url = "{}/api/webpublish/batch".format(webserver_url)
with open(batch_url) as f:
data = json.loads(f.read())
try:
r = requests.post(server_url, json=data)
log.info("response{}".format(r))
except Exception:
log.info("exception", exc_info=True)
dbcon.update_one(
{"_id": batch["_id"]},
{"$set":
{
"finish_date": datetime.now(),
"status": "sent_for_reprocessing",
"progress": 1
}}
)
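For reference, this is the shape of the `webpublishes` record that these helpers and the `remotepublish` command further below read and update; the field names are collected from the code in this PR and this is a sketch, not a formal schema:

```python
from datetime import datetime

# Sketch of one `webpublishes` document as assembled by the code in this PR.
webpublish_record = {
    "batch_id": "a_batch_folder_name",   # name of the uploaded batch folder
    "user": "artist@example.com",        # email provided by the front end
    "status": "in_progress",             # later: finished_ok, error, reprocess, sent_for_reprocessing
    "start_date": datetime.now(),
    # fields added while publishing progresses and finishes:
    "progress": 0.95,                    # 0-1, set to 1 on the final update
    "log": "",                           # concatenated pyblish log lines
    "path": "/project/asset/task",       # ctx_path written by IntegrateContextToLog
    "finish_date": datetime.now(),
}
```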

View file

@ -0,0 +1,49 @@
"""Loads publishing context from json and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
Provides:
context, instances -> All data from previous publishing process.
"""
import ftrack_api
import os
import pyblish.api
class CollectUsername(pyblish.api.ContextPlugin):
"""
Translates the user email to an Ftrack username.
Emails in Ftrack are the same as in the company's Slack; the username is
needed to load data into Ftrack.
Expects a "pype.club" user created on Ftrack and the FTRACK_BOT_API_KEY env
var to be set.
"""
order = pyblish.api.CollectorOrder - 0.488
label = "Collect ftrack username"
hosts = ["webpublisher"]
_context = None
def process(self, context):
os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]
self.log.info("CollectUsername")
for instance in context:
email = instance.data["user_email"]
self.log.info("email:: {}".format(email))
session = ftrack_api.Session(auto_connect_event_hub=False)
user = session.query("User where email like '{}'".format(
email))
if not user:
raise ValueError(
"Couldnt find user with {} email".format(email))
os.environ["FTRACK_API_USER"] = user[0].get("username")
break

View file

@ -10,8 +10,9 @@ log = PypeLogger.get_logger("WebServer")
class WebServerManager:
"""Manger that care about web server thread."""
def __init__(self, module):
self.module = module
def __init__(self, port=None, host=None):
self.port = port or 8079
self.host = host or "localhost"
self.client = None
self.handlers = {}
@ -24,8 +25,8 @@ class WebServerManager:
self.webserver_thread = WebServerThread(self)
@property
def port(self):
return self.module.port
def url(self):
return "http://{}:{}".format(self.host, self.port)
def add_route(self, *args, **kwargs):
self.app.router.add_route(*args, **kwargs)
@ -78,6 +79,10 @@ class WebServerThread(threading.Thread):
def port(self):
return self.manager.port
@property
def host(self):
return self.manager.host
def run(self):
self.is_running = True
@ -110,7 +115,7 @@ class WebServerThread(threading.Thread):
""" Starts runner and TCPsite """
self.runner = web.AppRunner(self.manager.app)
await self.runner.setup()
self.site = web.TCPSite(self.runner, 'localhost', self.port)
self.site = web.TCPSite(self.runner, self.host, self.port)
await self.site.start()
def stop(self):

View file

@ -13,12 +13,15 @@ class WebServerModule(OpenPypeModule, ITrayService):
name = "webserver"
label = "WebServer"
webserver_url_env = "OPENPYPE_WEBSERVER_URL"
def initialize(self, _module_settings):
self.enabled = True
self.server_manager = None
self._host_listener = None
self.port = self.find_free_port()
self.webserver_url = None
def connect_with_modules(self, enabled_modules):
if not self.server_manager:
@ -43,10 +46,8 @@ class WebServerModule(OpenPypeModule, ITrayService):
static_prefix = "/res"
self.server_manager.add_static(static_prefix, resources.RESOURCES_DIR)
webserver_url = "http://localhost:{}".format(self.port)
os.environ["OPENPYPE_WEBSERVER_URL"] = webserver_url
os.environ["OPENPYPE_STATICS_SERVER"] = "{}{}".format(
webserver_url, static_prefix
self.webserver_url, static_prefix
)
def _add_listeners(self):
@ -64,17 +65,34 @@ class WebServerModule(OpenPypeModule, ITrayService):
if self.server_manager:
self.server_manager.stop_server()
@staticmethod
def create_new_server_manager(port=None, host=None):
"""Create webserver manager for passed port and host.
Args:
port(int): Port on which the webserver will listen.
host(str): Host name or IP address. Default is 'localhost'.
Returns:
WebServerManager: Prepared manager.
"""
from .server import WebServerManager
return WebServerManager(port, host)
def create_server_manager(self):
if self.server_manager:
return
from .server import WebServerManager
self.server_manager = WebServerManager(self)
self.server_manager = self.create_new_server_manager(self.port)
self.server_manager.on_stop_callbacks.append(
self.set_service_failed_icon
)
webserver_url = self.server_manager.url
os.environ[self.webserver_url_env] = str(webserver_url)
self.webserver_url = webserver_url
@staticmethod
def find_free_port(
port_from=None, port_to=None, exclude_ports=None, host=None

View file

@ -45,6 +45,7 @@ class ExtractBurnin(openpype.api.Extractor):
"fusion",
"aftereffects",
"tvpaint",
"webpublisher",
"aftereffects"
# "resolve"
]

View file

@ -17,7 +17,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
"imagesequence", "render", "render2d",
"source", "plate", "take"
]
hosts = ["shell", "fusion", "resolve"]
hosts = ["shell", "fusion", "resolve", "webpublisher"]
enabled = False
# presetable attribute

View file

@ -45,6 +45,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
"fusion",
"tvpaint",
"resolve",
"webpublisher",
"aftereffects"
]

View file

@ -3,7 +3,7 @@
import os
import sys
import json
from pathlib import Path
from datetime import datetime
from openpype.lib import PypeLogger
from openpype.api import get_app_environments_for_context
@ -40,6 +40,12 @@ class PypeCommands:
)
return run_event_server(*args)
@staticmethod
def launch_webpublisher_webservercli(*args, **kwargs):
from openpype.hosts.webpublisher.webserver_service.webserver_cli \
import (run_webserver)
return run_webserver(*args, **kwargs)
@staticmethod
def launch_standalone_publisher():
from openpype.tools import standalonepublish
@ -104,6 +110,123 @@ class PypeCommands:
log.info("Publish finished.")
uninstall()
@staticmethod
def remotepublish(project, batch_path, host, user, targets=None):
"""Start headless publishing.
Publishing uses the JSON metadata found in the passed batch path.
Args:
project (str): project to publish (only a single context is expected
per call of remotepublish)
batch_path (str): Path to the batch folder. Contains subfolders with
resources (workfile, another subfolder 'renders', etc.)
targets (string): What module should be targeted
(to choose validator for example)
host (string)
user (string): email address for webpublisher
Raises:
RuntimeError: When there is no path to process.
"""
if not batch_path:
raise RuntimeError("No publish paths specified")
from openpype import install, uninstall
from openpype.api import Logger
from openpype.lib import OpenPypeMongoConnection
# Register target and host
import pyblish.api
import pyblish.util
log = Logger.get_logger()
log.info("remotepublish command")
install()
if host:
pyblish.api.register_host(host)
if targets:
if isinstance(targets, str):
targets = [targets]
for target in targets:
pyblish.api.register_target(target)
os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
os.environ["AVALON_PROJECT"] = project
os.environ["AVALON_APP"] = host
import avalon.api
from openpype.hosts.webpublisher import api as webpublisher
avalon.api.install(webpublisher)
log.info("Running publish ...")
# Error exit as soon as any error occurs.
error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
mongo_client = OpenPypeMongoConnection.get_mongo_client()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
dbcon = mongo_client[database_name]["webpublishes"]
_, batch_id = os.path.split(batch_path)
_id = dbcon.insert_one({
"batch_id": batch_id,
"start_date": datetime.now(),
"user": user,
"status": "in_progress"
}).inserted_id
log_lines = []
for result in pyblish.util.publish_iter():
for record in result["records"]:
log_lines.append("{}: {}".format(
result["plugin"].label, record.msg))
if result["error"]:
log.error(error_format.format(**result))
uninstall()
log_lines.append(error_format.format(**result))
dbcon.update_one(
{"_id": _id},
{"$set":
{
"finish_date": datetime.now(),
"status": "error",
"log": os.linesep.join(log_lines)
}}
)
sys.exit(1)
else:
dbcon.update_one(
{"_id": _id},
{"$set":
{
"progress": max(result["progress"], 0.95),
"log": os.linesep.join(log_lines)
}}
)
dbcon.update_one(
{"_id": _id},
{"$set":
{
"finish_date": datetime.now(),
"status": "finished_ok",
"progress": 1,
"log": os.linesep.join(log_lines)
}}
)
log.info("Publish finished.")
uninstall()
@staticmethod
def extractenvironments(output_json_path, project, asset, task, app):
env = os.environ.copy()
if all((project, asset, task, app)):

View file

@ -0,0 +1,120 @@
{
"publish": {
"CollectPublishedFiles": {
"task_type_to_family": {
"Animation": {
"workfile": {
"is_sequence": false,
"extensions": [
"tvp"
],
"families": [],
"tags": [],
"subset_template_name": ""
},
"render": {
"is_sequence": true,
"extensions": [
"png",
"exr",
"tiff",
"tif"
],
"families": [
"review"
],
"tags": [
"review"
],
"subset_template_name": ""
}
},
"Compositing": {
"workfile": {
"is_sequence": false,
"extensions": [
"aep"
],
"families": [],
"tags": [],
"subset_template_name": ""
},
"render": {
"is_sequence": true,
"extensions": [
"png",
"exr",
"tiff",
"tif"
],
"families": [
"review"
],
"tags": [
"review"
],
"subset_template_name": ""
}
},
"Layout": {
"workfile": {
"is_sequence": false,
"extensions": [
"psd"
],
"families": [],
"tags": [],
"subset_template_name": ""
},
"image": {
"is_sequence": false,
"extensions": [
"png",
"jpg",
"jpeg",
"tiff",
"tif"
],
"families": [
"review"
],
"tags": [
"review"
],
"subset_template_name": ""
}
},
"default_task_type": {
"workfile": {
"is_sequence": false,
"extensions": [
"tvp"
],
"families": [],
"tags": [],
"subset_template_name": "{family}{Variant}"
},
"render": {
"is_sequence": true,
"extensions": [
"png",
"exr",
"tiff",
"tif"
],
"families": [
"review"
],
"tags": [
"review"
],
"subset_template_name": "{family}{Variant}"
}
},
"__dynamic_keys_labels__": {
"default_task_type": "Default task type"
}
}
}
}
}
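The `CollectPublishedFiles._get_family` method earlier in this diff walks this mapping: within the entry for the incoming task type it returns the first family whose `is_sequence` flag and extension list match the uploaded files (an empty string in `extensions` acts as a wildcard). A self-contained sketch of that lookup over the `Animation` entry above:

```python
# Standalone sketch of the family lookup performed by
# CollectPublishedFiles._get_family over the mapping above.
def get_family(task_settings, is_sequence, extension):
    for family, content in task_settings.items():
        if is_sequence != content["is_sequence"]:
            continue
        if extension in content["extensions"] or "" in content["extensions"]:
            return (family, content["families"],
                    content["subset_template_name"], content["tags"])
    raise AssertionError("No family found for is_sequence={}, extension={}".format(
        is_sequence, extension))


animation = {
    "workfile": {"is_sequence": False, "extensions": ["tvp"],
                 "families": [], "tags": [], "subset_template_name": ""},
    "render": {"is_sequence": True, "extensions": ["png", "exr", "tiff", "tif"],
               "families": ["review"], "tags": ["review"],
               "subset_template_name": ""},
}

print(get_family(animation, True, "png"))   # ('render', ['review'], '', ['review'])
print(get_family(animation, False, "tvp"))  # ('workfile', [], '', [])
```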

View file

@ -118,6 +118,10 @@
"type": "schema",
"name": "schema_project_standalonepublisher"
},
{
"type": "schema",
"name": "schema_project_webpublisher"
},
{
"type": "schema",
"name": "schema_project_unreal"

View file

@ -0,0 +1,69 @@
{
"type": "dict",
"collapsible": true,
"key": "webpublisher",
"label": "Web Publisher",
"is_file": true,
"children": [
{
"type": "dict",
"collapsible": true,
"key": "publish",
"label": "Publish plugins",
"children": [
{
"type": "dict",
"collapsible": true,
"key": "CollectPublishedFiles",
"label": "Collect Published Files",
"children": [
{
"type": "dict-modifiable",
"collapsible": true,
"key": "task_type_to_family",
"label": "Task type to family mapping",
"collapsible_key": true,
"object_type": {
"type": "dict-modifiable",
"collapsible": false,
"key": "task_type",
"collapsible_key": false,
"object_type": {
"type": "dict",
"children": [
{
"type": "boolean",
"key": "is_sequence",
"label": "Is Sequence"
},
{
"type": "list",
"key": "extensions",
"label": "Extensions",
"object_type": "text"
},
{
"type": "list",
"key": "families",
"label": "Families",
"object_type": "text"
},
{
"type": "schema",
"name": "schema_representation_tags"
},
{
"type": "text",
"key": "subset_template_name",
"label": "Subset template name"
}
]
}
}
}
]
}
]
}
]
}

View file

@ -8,7 +8,10 @@
"burnin": "Add burnins"
},
{
"ftrackreview": "Add to Ftrack"
"review": "Create review"
},
{
"ftrackreview": "Add review to Ftrack"
},
{
"delete": "Delete output"

View file

@ -0,0 +1,84 @@
---
id: admin_webserver_for_webpublisher
title: Webserver for webpublisher
sidebar_label: Webserver for webpublisher
---
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
Running the OpenPype webserver is needed as the backend part of web publishing.
Any OS supported by OpenPype can be used as the host server.
Web publishing consists of two sides, the front end (FE) and the OpenPype backend; this documentation covers only the OP side.
It is expected that FE and OP live on two separate servers: the FE publicly available, the OP safely inside the customer network.
# Requirements for servers
- The OP server must allow the FE access to its `8079` port. (It is recommended to whitelist only the FE IP.)
- Both servers must share a folder for published resources (images, workfiles, etc.).
# Prepare Ftrack
The current webpublish process expects authentication via Slack. It is expected that the customer has users created on Ftrack
with the same email addresses as on Slack. As some customers might have usernames different from their emails, conversion from email to username is needed.
For this, a "pype.club" user needs to be present on Ftrack; creating this user should be a standard part of the Ftrack preparation for OpenPype.
Next, create an API key on Ftrack and store it temporarily, as you won't have access to the key after creation.
# Prepare Openpype
Deploy the OP build distribution (OpenPype Igniter) on an OS of your choice.
## Run the webserver as a Linux service
(This expects that OP Igniter is deployed to `/opt/openpype` and that the log should be stored in `/tmp/openpype.log`.)
1. Create the launch script `sudo vi /opt/openpype/webpublisher_webserver.sh`
2. Paste the content:
```sh
#!/usr/bin/env bash
export OPENPYPE_DEBUG=3
export WEBSERVER_HOST_IP=localhost
export FTRACK_BOT_API_USER=YOUR_API_USER
export FTRACK_BOT_API_KEY=YOUR_API_KEY
export PYTHONDONTWRITEBYTECODE=1
export OPENPYPE_MONGO=YOUR_MONGODB_CONNECTION
pushd /opt/openpype
./openpype_console webpublisherwebserver --upload_dir YOUR_SHARED_FOLDER_ON_HOST --executable /opt/openpype/openpype_console > /tmp/openpype.log 2>&1
```
3. Create the service file `sudo vi /etc/systemd/system/openpype-webserver.service`
4. Paste the content:
```sh
[Unit]
Description=Run OpenPype Webpublisher Webserver Service
After=network.target
[Service]
Type=idle
ExecStart=/opt/openpype/webpublisher_webserver.sh
Restart=on-failure
RestartSec=10s
StandardOutput=append:/tmp/openpype.log
StandardError=append:/tmp/openpype.log
[Install]
WantedBy=multi-user.target
```
5. Change the file permissions:
`sudo chmod 0755 /etc/systemd/system/openpype-webserver.service`
6. Enable the service:
`sudo systemctl enable openpype-webserver`
7. Start the service:
`sudo systemctl start openpype-webserver`
8. Check `/tmp/openpype.log` to verify that OP started.
(Note: the service can be restarted with `service openpype-webserver restart`; this will purge the current log file!)
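Once the service is running, a quick way to confirm the webserver responds is to call one of its REST routes from a machine allowed through the firewall; the `/api/projects` route comes from this PR, while the host below is a placeholder and `8079` is the default port:

```python
import requests

# Replace with the host running the webpublisher webserver; 8079 is the default port.
resp = requests.get("http://YOUR_OP_SERVER:8079/api/projects", timeout=10)
print(resp.status_code)   # 200 when the webserver and its MongoDB connection are healthy
print(resp.json())        # list of {"id": ..., "name": ...} project entries
```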