Mirror of https://github.com/ynput/ayon-core.git

Commit 35b389f4a2: Merge branch 'develop' into feature/OP-3817_Move-aftereffects-functions-to-aftereffects
108 changed files with 1725 additions and 1092 deletions
@@ -24,7 +24,7 @@ from openpype.settings.constants import (
     METADATA_KEYS,
     M_DYNAMIC_KEY_LABEL
 )
-from . import PypeLogger
+from .log import Logger
 from .profiles_filtering import filter_profiles
 from .local_settings import get_openpype_username
@@ -138,7 +138,7 @@ def get_logger():
     """Global lib.applications logger getter."""
     global _logger
     if _logger is None:
-        _logger = PypeLogger.get_logger(__name__)
+        _logger = Logger.get_logger(__name__)
     return _logger
@@ -373,7 +373,7 @@ class ApplicationManager:
     """

     def __init__(self, system_settings=None):
-        self.log = PypeLogger.get_logger(self.__class__.__name__)
+        self.log = Logger.get_logger(self.__class__.__name__)

         self.app_groups = {}
         self.applications = {}
@@ -735,7 +735,7 @@ class LaunchHook:

         Always should be called
         """
-        self.log = PypeLogger().get_logger(self.__class__.__name__)
+        self.log = Logger.get_logger(self.__class__.__name__)

         self.launch_context = launch_context
@@ -877,7 +877,7 @@ class ApplicationLaunchContext:

         # Logger
         logger_name = "{}-{}".format(self.__class__.__name__, self.app_name)
-        self.log = PypeLogger.get_logger(logger_name)
+        self.log = Logger.get_logger(logger_name)

         self.executable = executable
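Every hunk in this group is the same mechanical substitution: call sites that went through `PypeLogger.get_logger(...)`, or an instantiated `PypeLogger().get_logger(...)`, now use `Logger.get_logger(...)` directly. A minimal sketch of the new call-site pattern, assuming `get_logger` is exposed as a classmethod on `openpype.lib.log.Logger`, as these hunks imply:

    from openpype.lib.log import Logger

    # Class-level access; no instance is created just to obtain a logger.
    log = Logger.get_logger(__name__)
    log.info("launch context prepared")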
@@ -950,6 +950,63 @@ class ApplicationLaunchContext:
         )
         self.kwargs["env"] = value

+    def _collect_addons_launch_hook_paths(self):
+        """Helper to collect application launch hooks from addons.
+
+        Modules have to implement 'get_launch_hook_paths' method which
+        can expect application as argument or nothing.
+
+        Returns:
+            List[str]: Paths to launch hook directories.
+        """
+
+        expected_types = (list, tuple, set)
+
+        output = []
+        for module in self.modules_manager.get_enabled_modules():
+            # Skip module if it does not implement 'get_launch_hook_paths'
+            func = getattr(module, "get_launch_hook_paths", None)
+            if func is None:
+                continue
+
+            func = module.get_launch_hook_paths
+            if hasattr(inspect, "signature"):
+                sig = inspect.signature(func)
+                expect_args = len(sig.parameters) > 0
+            else:
+                expect_args = len(inspect.getargspec(func)[0]) > 0
+
+            # Pass application argument if method expects it.
+            try:
+                if expect_args:
+                    hook_paths = func(self.application)
+                else:
+                    hook_paths = func()
+            except Exception:
+                self.log.warning(
+                    "Failed to call 'get_launch_hook_paths'",
+                    exc_info=True
+                )
+                continue
+
+            if not hook_paths:
+                continue
+
+            # Convert string to list
+            if isinstance(hook_paths, six.string_types):
+                hook_paths = [hook_paths]
+
+            # Skip invalid types
+            if not isinstance(hook_paths, expected_types):
+                self.log.warning((
+                    "Result of `get_launch_hook_paths`"
+                    " has invalid type {}. Expected {}"
+                ).format(type(hook_paths), expected_types))
+                continue
+
+            output.extend(hook_paths)
+        return output
+
     def paths_to_launch_hooks(self):
         """Directory paths where to look for launch hooks."""
         # This method has potential to be part of application manager (maybe).
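The added `_collect_addons_launch_hook_paths` inspects each enabled module's `get_launch_hook_paths` and passes the application only when the method declares a parameter; since the method is bound, `inspect.signature` does not count `self`, which matches the zero/one parameter check above. A hypothetical addon showing both accepted shapes (class names, paths, and the `host_name` check are invented for illustration):

    import os

    class GenericAddon:
        """Zero-argument variant: same hook paths for every application."""

        def get_launch_hook_paths(self):
            return [os.path.join(os.path.dirname(os.path.abspath(__file__)), "hooks")]

    class AfterEffectsAddon:
        """One-argument variant: hook paths only for a matching application."""

        def get_launch_hook_paths(self, application):
            if application.host_name != "aftereffects":
                # Falsy results are skipped by the collector.
                return []
            # A plain string is also accepted; the collector wraps it in a list.
            return os.path.join(os.path.dirname(os.path.abspath(__file__)), "ae_hooks")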
@@ -983,9 +1040,7 @@ class ApplicationLaunchContext:
             paths.append(path)

         # Load modules paths
-        paths.extend(
-            self.modules_manager.collect_launch_hook_paths(self.application)
-        )
+        paths.extend(self._collect_addons_launch_hook_paths())

         return paths
@@ -5,7 +5,7 @@ import platform
 import json
 import tempfile

-from .log import PypeLogger as Logger
+from .log import Logger
 from .vendor_bin_utils import find_executable

 # MSDN process creation flag (Windows only)
@@ -40,7 +40,7 @@ def execute(args,

     log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:']

-    log = Logger().get_logger('execute')
+    log = Logger.get_logger('execute')
     log.info("Executing ({})".format(" ".join(args)))
     popen = subprocess.Popen(
         args,
@@ -486,12 +486,18 @@ class Logger:


 class PypeLogger(Logger):
     """Duplicate of 'Logger'.

     Deprecated:
         Class will be removed after release version 3.16.*
     """

     @classmethod
     def get_logger(cls, *args, **kwargs):
         logger = Logger.get_logger(*args, **kwargs)
-        # TODO uncomment when replaced most of places
-        # logger.warning((
-        #     "'openpype.lib.PypeLogger' is deprecated class."
-        #     " Please use 'openpype.lib.Logger' instead."
-        # ))
+        logger.warning((
+            "'openpype.lib.PypeLogger' is deprecated class."
+            " Please use 'openpype.lib.Logger' instead."
+        ))
         return logger
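Activating the previously commented warning means the deprecation notice travels through the logger itself, so it reaches the handlers and log streams users already watch. The more conventional alternative would be Python's warnings machinery; a sketch of that variant for comparison (hypothetical, not what this repo does):

    import warnings

    def get_logger(*args, **kwargs):
        # Emits a DeprecationWarning tied to the caller's line instead of
        # writing into the log output. DeprecationWarning is hidden by
        # default outside __main__, which is one reason logging the notice
        # may reach more users.
        warnings.warn(
            "'openpype.lib.PypeLogger' is deprecated."
            " Please use 'openpype.lib.Logger' instead.",
            DeprecationWarning,
            stacklevel=2,
        )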
@@ -6,11 +6,6 @@ import collections

 import six

-from .log import PypeLogger
-
-log = PypeLogger.get_logger(__name__)
-
-
 KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})")
 KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+")
 SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
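With the module-level logger gone, what remains at the top of this module are the compiled template patterns. To see what `KEY_PATTERN` actually captures, a small self-contained check (the template string is made up):

    import re

    KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})")

    template = "{project[name]}/{asset}/v{version:0>3}"
    print(KEY_PATTERN.findall(template))
    # ['{project[name]}', '{asset}', '{version:0>3}']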
@@ -373,48 +373,3 @@ def source_hash(filepath, *args):
     time = str(os.path.getmtime(filepath))
     size = str(os.path.getsize(filepath))
     return "|".join([file_name, time, size] + list(args)).replace(".", ",")
-
-
-def parse_json(path):
-    """Parses json file at 'path' location
-
-    Returns:
-        (dict) or None if unparsable
-    Raises:
-        AssertionError if 'path' doesn't exist
-    """
-    path = path.strip('\"')
-    assert os.path.isfile(path), (
-        "Path to json file doesn't exist. \"{}\"".format(path)
-    )
-    data = None
-    with open(path, "r") as json_file:
-        try:
-            data = json.load(json_file)
-        except Exception as exc:
-            log.error(
-                "Error loading json: "
-                "{} - Exception: {}".format(path, exc)
-            )
-    return data
-
-
-def get_batch_asset_task_info(ctx):
-    """Parses context data from webpublisher's batch metadata
-
-    Returns:
-        (tuple): asset, task_name (Optional), task_type
-    """
-    task_type = "default_task_type"
-    task_name = None
-    asset = None
-
-    if ctx["type"] == "task":
-        items = ctx["path"].split('/')
-        asset = items[-2]
-        task_name = ctx["name"]
-        task_type = ctx["attributes"]["type"]
-    else:
-        asset = ctx["name"]
-
-    return asset, task_name, task_type
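`parse_json` and `get_batch_asset_task_info` leave this module; the diff here records only the deletion. For reference, the kind of webpublisher batch context the second helper consumed, with invented values:

    # Hypothetical batch-manifest entry of type "task".
    ctx = {
        "type": "task",
        "path": "/demo_project/episodes/ep01/sh010/animation",
        "name": "animation",
        "attributes": {"type": "Animation"},
    }

    # Per the removed logic: asset is the second-to-last path segment,
    # task name and type come from the entry itself.
    # -> ("sh010", "animation", "Animation")
    # For any other "type", only the name is used:
    # -> (ctx["name"], None, "default_task_type")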
@@ -1,277 +0,0 @@
-import os
-from datetime import datetime
-import collections
-
-from bson.objectid import ObjectId
-
-import pyblish.util
-import pyblish.api
-
-from openpype.client.mongo import OpenPypeMongoConnection
-from openpype.lib.plugin_tools import parse_json
-from openpype.lib.profiles_filtering import filter_profiles
-from openpype.api import get_project_settings
-
-ERROR_STATUS = "error"
-IN_PROGRESS_STATUS = "in_progress"
-REPROCESS_STATUS = "reprocess"
-SENT_REPROCESSING_STATUS = "sent_for_reprocessing"
-FINISHED_REPROCESS_STATUS = "republishing_finished"
-FINISHED_OK_STATUS = "finished_ok"
-
-
-def headless_publish(log, close_plugin_name=None, is_test=False):
-    """Runs publish in an opened host with a context and closes Python process.
-    """
-    if not is_test:
-        dbcon = get_webpublish_conn()
-        _id = os.environ.get("BATCH_LOG_ID")
-        if not _id:
-            log.warning("Unable to store log records, "
-                        "batch will be unfinished!")
-            return
-
-        publish_and_log(dbcon, _id, log, close_plugin_name=close_plugin_name)
-    else:
-        publish(log, close_plugin_name)
-
-
-def get_webpublish_conn():
-    """Get connection to OP 'webpublishes' collection."""
-    mongo_client = OpenPypeMongoConnection.get_mongo_client()
-    database_name = os.environ["OPENPYPE_DATABASE_NAME"]
-    return mongo_client[database_name]["webpublishes"]
-
-
-def start_webpublish_log(dbcon, batch_id, user):
-    """Start new log record for 'batch_id'
-
-    Args:
-        dbcon (OpenPypeMongoConnection)
-        batch_id (str)
-        user (str)
-    Returns:
-        (ObjectId) from DB
-    """
-    return dbcon.insert_one({
-        "batch_id": batch_id,
-        "start_date": datetime.now(),
-        "user": user,
-        "status": IN_PROGRESS_STATUS,
-        "progress": 0  # integer 0-100, percentage
-    }).inserted_id
-
-
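Together, `get_webpublish_conn` and `start_webpublish_log` pin down the shape of a `webpublishes` record; the functions that follow only mutate `status`, `progress`, `log` and `finish_date`. An illustrative document as initially inserted (values invented):

    from datetime import datetime

    webpublish_doc = {
        "batch_id": "batch_20230101_0001",  # id sent from the frontend
        "start_date": datetime.now(),
        "user": "artist01",
        "status": "in_progress",            # IN_PROGRESS_STATUS
        "progress": 0,                      # integer 0-100, percentage
    }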
-def publish(log, close_plugin_name=None, raise_error=False):
-    """Loops through all plugins, logs to console. Used for tests.
-
-    Args:
-        log (OpenPypeLogger)
-        close_plugin_name (str): name of plugin with responsibility to
-            close host app
-    """
-    # Error exit as soon as any error occurs.
-    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}"
-
-    close_plugin = _get_close_plugin(close_plugin_name, log)
-
-    for result in pyblish.util.publish_iter():
-        for record in result["records"]:
-            log.info("{}: {}".format(
-                result["plugin"].label, record.msg))
-
-        if result["error"]:
-            error_message = error_format.format(**result)
-            log.error(error_message)
-            if close_plugin:  # close host app explicitly after error
-                context = pyblish.api.Context()
-                close_plugin().process(context)
-            if raise_error:
-                # Fatal Error is because of Deadline
-                error_message = "Fatal Error: " + error_format.format(**result)
-                raise RuntimeError(error_message)
-
-
-def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
-    """Loops through all plugins, logs ok and fails into OP DB.
-
-    Args:
-        dbcon (OpenPypeMongoConnection)
-        _id (str) - id of current job in DB
-        log (OpenPypeLogger)
-        batch_id (str) - id sent from frontend
-        close_plugin_name (str): name of plugin with responsibility to
-            close host app
-    """
-    # Error exit as soon as any error occurs.
-    error_format = "Failed {plugin.__name__}: {error} -- {error.traceback}\n"
-    error_format += "-" * 80 + "\n"
-
-    close_plugin = _get_close_plugin(close_plugin_name, log)
-
-    if isinstance(_id, str):
-        _id = ObjectId(_id)
-
-    log_lines = []
-    processed = 0
-    log_every = 5
-    for result in pyblish.util.publish_iter():
-        for record in result["records"]:
-            log_lines.append("{}: {}".format(
-                result["plugin"].label, record.msg))
-        processed += 1
-
-        if result["error"]:
-            log.error(error_format.format(**result))
-            log_lines = [error_format.format(**result)] + log_lines
-            dbcon.update_one(
-                {"_id": _id},
-                {"$set":
-                    {
-                        "finish_date": datetime.now(),
-                        "status": ERROR_STATUS,
-                        "log": os.linesep.join(log_lines)
-
-                    }}
-            )
-            if close_plugin:  # close host app explicitly after error
-                context = pyblish.api.Context()
-                close_plugin().process(context)
-            return
-        elif processed % log_every == 0:
-            # pyblish returns progress in 0.0 - 2.0
-            progress = min(round(result["progress"] / 2 * 100), 99)
-            dbcon.update_one(
-                {"_id": _id},
-                {"$set":
-                    {
-                        "progress": progress,
-                        "log": os.linesep.join(log_lines)
-                    }}
-            )
-
-    # final update
-    if batch_id:
-        dbcon.update_many(
-            {"batch_id": batch_id, "status": SENT_REPROCESSING_STATUS},
-            {
-                "$set":
-                    {
-                        "finish_date": datetime.now(),
-                        "status": FINISHED_REPROCESS_STATUS,
-                    }
-            }
-        )
-
-    dbcon.update_one(
-        {"_id": _id},
-        {
-            "$set":
-                {
-                    "finish_date": datetime.now(),
-                    "status": FINISHED_OK_STATUS,
-                    "progress": 100,
-                    "log": os.linesep.join(log_lines)
-                }
-        }
-    )
-
-
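One detail of `publish_and_log` deserves spelling out: per the inline comment, pyblish's iterator reports progress in the 0.0-2.0 range, so the stored percentage halves it and caps at 99, reserving 100 for the final update. A quick check of that arithmetic:

    # stored = min(round(p / 2 * 100), 99)
    for p in (0.5, 1.0, 1.9, 2.0):
        print(p, min(round(p / 2 * 100), 99))
    # 0.5 25
    # 1.0 50
    # 1.9 95
    # 2.0 99  (capped; 100 is written only by the final update)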
-def fail_batch(_id, dbcon, msg):
-    """Set current batch as failed as there is some problem.
-
-    Raises:
-        ValueError
-    """
-    dbcon.update_one(
-        {"_id": _id},
-        {"$set":
-            {
-                "finish_date": datetime.now(),
-                "status": ERROR_STATUS,
-                "log": msg
-
-            }}
-    )
-    raise ValueError(msg)
-
-
-def find_variant_key(application_manager, host):
-    """Searches for latest installed variant for 'host'
-
-    Args:
-        application_manager (ApplicationManager)
-        host (str)
-    Returns:
-        (string) (optional)
-    Raises:
-        (ValueError) if no variant found
-    """
-    app_group = application_manager.app_groups.get(host)
-    if not app_group or not app_group.enabled:
-        raise ValueError("No application {} configured".format(host))
-
-    found_variant_key = None
-    # finds most up-to-date variant if any installed
-    sorted_variants = collections.OrderedDict(
-        sorted(app_group.variants.items()))
-    for variant_key, variant in sorted_variants.items():
-        for executable in variant.executables:
-            if executable.exists():
-                found_variant_key = variant_key
-
-    if not found_variant_key:
-        raise ValueError("No executable for {} found".format(host))
-
-    return found_variant_key
-
-
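`find_variant_key` sorts variant keys ascending and keeps overwriting the result for every variant with an existing executable, so the last hit, i.e. the highest sorting installed variant, wins. The rule in isolation (data invented):

    import collections

    # variant key -> does some executable for it exist on this machine?
    installed = {"2021": True, "2023": False, "2022": True}

    found = None
    for key, exists in collections.OrderedDict(sorted(installed.items())).items():
        if exists:
            found = key  # last overwrite is the newest installed variant
    print(found)  # "2022": 2023 is configured but not installed

Note the ordering is string-based, so it inherits the usual lexicographic caveat (for example, "9" sorts after "10").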
-def _get_close_plugin(close_plugin_name, log):
-    if close_plugin_name:
-        plugins = pyblish.api.discover()
-        for plugin in plugins:
-            if plugin.__name__ == close_plugin_name:
-                return plugin
-
-    log.debug("Close plugin not found, app might not close.")
-
-
-def get_task_data(batch_dir):
-    """Return parsed data from first task manifest.json
-
-    Used for `remotepublishfromapp` command where batch contains only
-    single task with publishable workfile.
-
-    Returns:
-        (dict)
-    Throws:
-        (ValueError) if batch or task manifest not found or broken
-    """
-    batch_data = parse_json(os.path.join(batch_dir, "manifest.json"))
-    if not batch_data:
-        raise ValueError(
-            "Cannot parse batch meta in {} folder".format(batch_dir))
-    task_dir_name = batch_data["tasks"][0]
-    task_data = parse_json(os.path.join(batch_dir, task_dir_name,
-                                        "manifest.json"))
-    if not task_data:
-        raise ValueError(
-            "Cannot parse batch meta in {} folder".format(task_data))
-
-    return task_data
-
-
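`get_task_data` expects a two-level manifest layout: the batch manifest lists task folder names, and each task folder carries its own manifest. A runnable sketch that builds the expected structure (file names follow the code, the task folder name is invented):

    import json
    import os
    import tempfile

    batch_dir = tempfile.mkdtemp()
    task_dir_name = "task_abc123"
    os.mkdir(os.path.join(batch_dir, task_dir_name))

    # Batch-level manifest lists its task folders.
    with open(os.path.join(batch_dir, "manifest.json"), "w") as f:
        json.dump({"tasks": [task_dir_name]}, f)

    # Task-level manifest is what get_task_data returns parsed.
    with open(os.path.join(batch_dir, task_dir_name, "manifest.json"), "w") as f:
        json.dump({"type": "task", "name": "animation"}, f)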
-def get_timeout(project_name, host_name, task_type):
-    """Returns timeout (seconds) from Settings profile."""
-    filter_data = {
-        "task_types": task_type,
-        "hosts": host_name
-    }
-    timeout_profiles = (get_project_settings(project_name)["webpublisher"]
-                        ["timeout_profiles"])
-    matching_item = filter_profiles(timeout_profiles, filter_data)
-    timeout = 3600
-    if matching_item:
-        timeout = matching_item["timeout"]
-
-    return timeout
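`get_timeout` is a plain profiles lookup: project settings hold a list of profiles keyed by task type and host, `filter_profiles` picks the closest match, and 3600 seconds is the fallback when nothing matches. Illustrative data (profile values invented; empty lists in profiles conventionally act as wildcards for that key):

    # Hypothetical webpublisher "timeout_profiles" setting.
    timeout_profiles = [
        {"task_types": ["Animation"], "hosts": ["aftereffects"], "timeout": 7200},
        {"task_types": [], "hosts": ["photoshop"], "timeout": 1800},
    ]

    filter_data = {"task_types": "Animation", "hosts": "aftereffects"}
    # filter_profiles(timeout_profiles, filter_data) would match the first
    # profile -> timeout 7200; with no match, get_timeout falls back to 3600.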