mirror of https://github.com/ynput/ayon-core.git
synced 2026-01-01 16:34:53 +01:00

rename openpype > ayon_core

This commit is contained in:
parent f59781f077
commit a4f2f76fbf

3135 changed files with 3 additions and 3 deletions
297 client/ayon_core/lib/__init__.py Normal file

@@ -0,0 +1,297 @@
# -*- coding: utf-8 -*-
# flake8: noqa E402
"""OpenPype lib functions."""
# add vendor to sys path based on Python version
import sys
import os
import site

from openpype import PACKAGE_DIR


# Add Python version specific vendor folder
python_version_dir = os.path.join(
    PACKAGE_DIR, "vendor", "python", "python_{}".format(sys.version[0])
)
# Prepend path in sys paths
sys.path.insert(0, python_version_dir)
site.addsitedir(python_version_dir)


from .events import (
    emit_event,
    register_event_callback
)

from .vendor_bin_utils import (
    ToolNotFoundError,
    find_executable,
    get_vendor_bin_path,
    get_oiio_tools_path,
    get_oiio_tool_args,
    get_ffmpeg_tool_path,
    get_ffmpeg_tool_args,
    is_oiio_supported,
)

from .attribute_definitions import (
    AbstractAttrDef,

    UIDef,
    UISeparatorDef,
    UILabelDef,

    UnknownDef,
    NumberDef,
    TextDef,
    EnumDef,
    BoolDef,
    FileDef,
    FileDefItem,
)

from .env_tools import (
    env_value_to_bool,
    get_paths_from_environ,
)

from .terminal import Terminal
from .execute import (
    get_ayon_launcher_args,
    get_openpype_execute_args,
    get_linux_launcher_args,
    execute,
    run_subprocess,
    run_detached_process,
    run_ayon_launcher_process,
    run_openpype_process,
    clean_envs_for_openpype_process,
    path_to_subprocess_arg,
    CREATE_NO_WINDOW
)
from .log import (
    Logger,
)

from .path_templates import (
    merge_dict,
    TemplateMissingKey,
    TemplateUnsolved,
    StringTemplate,
    TemplatesDict,
    FormatObject,
)

from .dateutils import (
    get_datetime_data,
    get_timestamp,
    get_formatted_current_time
)

from .python_module_tools import (
    import_filepath,
    modules_from_path,
    recursive_bases_from_class,
    classes_from_module,
    import_module_from_dirpath,
    is_func_signature_supported,
)

from .profiles_filtering import (
    compile_list_of_regexes,
    filter_profiles
)

from .transcoding import (
    get_transcode_temp_directory,
    should_convert_for_ffmpeg,
    convert_for_ffmpeg,
    convert_input_paths_for_ffmpeg,
    get_ffprobe_data,
    get_ffprobe_streams,
    get_ffmpeg_codec_args,
    get_ffmpeg_format_args,
    convert_ffprobe_fps_value,
    convert_ffprobe_fps_to_float,
    get_rescaled_command_arguments,
)

from .local_settings import (
    IniSettingRegistry,
    JSONSettingRegistry,
    OpenPypeSecureRegistry,
    OpenPypeSettingsRegistry,
    get_local_site_id,
    change_openpype_mongo_url,
    get_openpype_username,
    is_admin_password_required
)

from .applications import (
    ApplicationLaunchFailed,
    ApplictionExecutableNotFound,
    ApplicationNotFound,
    ApplicationManager,

    PreLaunchHook,
    PostLaunchHook,

    EnvironmentPrepData,
    prepare_app_environments,
    prepare_context_environments,
    get_app_environments_for_context,
    apply_project_environments_value
)

from .plugin_tools import (
    prepare_template_data,
    source_hash,
)

from .path_tools import (
    format_file_size,
    collect_frames,
    create_hard_link,
    version_up,
    get_version_from_path,
    get_last_version_from_path,
)

from .openpype_version import (
    op_version_control_available,
    get_openpype_version,
    get_build_version,
    get_expected_version,
    is_running_from_build,
    is_running_staging,
    is_current_version_studio_latest,
    is_current_version_higher_than_expected
)


from .connections import (
    requests_get,
    requests_post
)

terminal = Terminal

__all__ = [
    "emit_event",
    "register_event_callback",

    "get_ayon_launcher_args",
    "get_openpype_execute_args",
    "get_linux_launcher_args",
    "execute",
    "run_subprocess",
    "run_detached_process",
    "run_ayon_launcher_process",
    "run_openpype_process",
    "clean_envs_for_openpype_process",
    "path_to_subprocess_arg",
    "CREATE_NO_WINDOW",

    "env_value_to_bool",
    "get_paths_from_environ",

    "ToolNotFoundError",
    "find_executable",
    "get_vendor_bin_path",
    "get_oiio_tools_path",
    "get_oiio_tool_args",
    "get_ffmpeg_tool_path",
    "get_ffmpeg_tool_args",
    "is_oiio_supported",

    "AbstractAttrDef",

    "UIDef",
    "UISeparatorDef",
    "UILabelDef",

    "UnknownDef",
    "NumberDef",
    "TextDef",
    "EnumDef",
    "BoolDef",
    "FileDef",
    "FileDefItem",

    "import_filepath",
    "modules_from_path",
    "recursive_bases_from_class",
    "classes_from_module",
    "import_module_from_dirpath",
    "is_func_signature_supported",

    "get_transcode_temp_directory",
    "should_convert_for_ffmpeg",
    "convert_for_ffmpeg",
    "convert_input_paths_for_ffmpeg",
    "get_ffprobe_data",
    "get_ffprobe_streams",
    "get_ffmpeg_codec_args",
    "get_ffmpeg_format_args",
    "convert_ffprobe_fps_value",
    "convert_ffprobe_fps_to_float",
    "get_rescaled_command_arguments",

    "IniSettingRegistry",
    "JSONSettingRegistry",
    "OpenPypeSecureRegistry",
    "OpenPypeSettingsRegistry",
    "get_local_site_id",
    "change_openpype_mongo_url",
    "get_openpype_username",
    "is_admin_password_required",

    "ApplicationLaunchFailed",
    "ApplictionExecutableNotFound",
    "ApplicationNotFound",
    "ApplicationManager",
    "PreLaunchHook",
    "PostLaunchHook",
    "EnvironmentPrepData",
    "prepare_app_environments",
    "prepare_context_environments",
    "get_app_environments_for_context",
    "apply_project_environments_value",

    "compile_list_of_regexes",

    "filter_profiles",

    "prepare_template_data",
    "source_hash",

    "format_file_size",
    "collect_frames",
    "create_hard_link",
    "version_up",
    "get_version_from_path",
    "get_last_version_from_path",

    "merge_dict",
    "TemplateMissingKey",
    "TemplateUnsolved",
    "StringTemplate",
    "TemplatesDict",
    "FormatObject",

    "terminal",

    "get_datetime_data",
    "get_formatted_current_time",

    "Logger",

    "op_version_control_available",
    "get_openpype_version",
    "get_build_version",
    "get_expected_version",
    "is_running_from_build",
    "is_running_staging",
    "is_current_version_studio_latest",

    "requests_get",
    "requests_post"
]
2064 client/ayon_core/lib/applications.py Normal file
File diff suppressed because it is too large

986 client/ayon_core/lib/attribute_definitions.py Normal file

@@ -0,0 +1,986 @@
import os
import re
import collections
import uuid
import json
import copy
from abc import ABCMeta, abstractmethod, abstractproperty

import six
import clique

# Global variable which stores attribute definitions by type
# - default types are registered on import
_attr_defs_by_type = {}


def register_attr_def_class(cls):
    """Register attribute definition.

    Registered definitions are currently used to deserialize data
    to objects.

    Args:
        cls (AbstractAttrDef): Non-abstract class to be registered with unique
            'type' attribute.

    Raises:
        KeyError: When type was already registered.
    """

    if cls.type in _attr_defs_by_type:
        raise KeyError("Type \"{}\" was already registered".format(cls.type))
    _attr_defs_by_type[cls.type] = cls


def get_attributes_keys(attribute_definitions):
    """Collect keys from list of attribute definitions.

    Args:
        attribute_definitions (List[AbstractAttrDef]): Objects of attribute
            definitions.

    Returns:
        Set[str]: Keys that will be created using passed attribute
            definitions.
    """

    keys = set()
    if not attribute_definitions:
        return keys

    for attribute_def in attribute_definitions:
        if not isinstance(attribute_def, UIDef):
            keys.add(attribute_def.key)
    return keys


def get_default_values(attribute_definitions):
    """Receive default values for attribute definitions.

    Args:
        attribute_definitions (List[AbstractAttrDef]): Attribute definitions
            for which default values should be collected.

    Returns:
        Dict[str, Any]: Default values for passed attribute definitions.
    """

    output = {}
    if not attribute_definitions:
        return output

    for attr_def in attribute_definitions:
        # Skip UI definitions
        if not isinstance(attr_def, UIDef):
            output[attr_def.key] = attr_def.default
    return output
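
A quick usage sketch (not part of the diff): collecting keys and defaults from a mixed list of definitions declared later in this file. The import path is assumed from the file location shown above.

```python
from ayon_core.lib.attribute_definitions import (
    BoolDef, NumberDef, UILabelDef,
    get_attributes_keys, get_default_values,
)

attr_defs = [
    UILabelDef("Render settings"),  # UI-only definition, contributes no key
    NumberDef("samples", minimum=1, maximum=4096, default=128),
    BoolDef("review", default=True),
]

get_attributes_keys(attr_defs)   # {'samples', 'review'}
get_default_values(attr_defs)    # {'samples': 128, 'review': True}
```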

class AbstractAttrDefMeta(ABCMeta):
    """Metaclass to validate existence of 'key' attribute.

    Each object of `AbstractAttrDef` must have a defined 'key' attribute.
    """

    def __call__(self, *args, **kwargs):
        obj = super(AbstractAttrDefMeta, self).__call__(*args, **kwargs)
        init_class = getattr(obj, "__init__class__", None)
        if init_class is not AbstractAttrDef:
            raise TypeError("{} super was not called in __init__.".format(
                type(obj)
            ))
        return obj


@six.add_metaclass(AbstractAttrDefMeta)
class AbstractAttrDef(object):
    """Abstraction of attribute definition.

    Each attribute definition must have implemented validation and
    conversion method.

    Attribute definition should be able to return a "default" value. That
    can be based on data passed into `__init__`, so it is not abstracted
    into an attribute.

    QUESTION:
    How to force to set `key` attribute?

    Args:
        key (str): Key under which the attribute value is stored.
        default (Any): Default value of an attribute.
        label (str): Attribute label.
        tooltip (str): Attribute tooltip.
        is_label_horizontal (bool): UI specific argument. Specify if label is
            next to value input or ahead.
        hidden (bool): Will be item hidden (for UI purposes).
        disabled (bool): Item will be visible but disabled (for UI purposes).
    """

    type_attributes = []

    is_value_def = True

    def __init__(
        self,
        key,
        default,
        label=None,
        tooltip=None,
        is_label_horizontal=None,
        hidden=False,
        disabled=False
    ):
        if is_label_horizontal is None:
            is_label_horizontal = True

        if hidden is None:
            hidden = False

        self.key = key
        self.label = label
        self.tooltip = tooltip
        self.default = default
        self.is_label_horizontal = is_label_horizontal
        self.hidden = hidden
        self.disabled = disabled
        self._id = uuid.uuid4().hex

        self.__init__class__ = AbstractAttrDef

    @property
    def id(self):
        return self._id

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return (
            self.key == other.key
            and self.hidden == other.hidden
            and self.default == other.default
            and self.disabled == other.disabled
        )

    def __ne__(self, other):
        return not self.__eq__(other)

    @abstractproperty
    def type(self):
        """Attribute definition type also used as identifier of class.

        Returns:
            str: Type of attribute definition.
        """

        pass

    @abstractmethod
    def convert_value(self, value):
        """Convert value to a valid one.

        Convert passed value to a valid type. Use default if value can't be
        converted.
        """

        pass

    def serialize(self):
        """Serialize object to data so it's possible to recreate it.

        Returns:
            Dict[str, Any]: Serialized object that can be passed to
                'deserialize' method.
        """

        data = {
            "type": self.type,
            "key": self.key,
            "label": self.label,
            "tooltip": self.tooltip,
            "default": self.default,
            "is_label_horizontal": self.is_label_horizontal,
            "hidden": self.hidden,
            "disabled": self.disabled
        }
        for attr in self.type_attributes:
            data[attr] = getattr(self, attr)
        return data

    @classmethod
    def deserialize(cls, data):
        """Recreate object from data.

        Data can be received using 'serialize' method.
        """

        return cls(**data)


# -----------------------------------------
# UI attribute definitions won't hold value
# -----------------------------------------

class UIDef(AbstractAttrDef):
    is_value_def = False

    def __init__(self, key=None, default=None, *args, **kwargs):
        super(UIDef, self).__init__(key, default, *args, **kwargs)

    def convert_value(self, value):
        return value


class UISeparatorDef(UIDef):
    type = "separator"


class UILabelDef(UIDef):
    type = "label"

    def __init__(self, label, key=None):
        super(UILabelDef, self).__init__(label=label, key=key)

    def __eq__(self, other):
        if not super(UILabelDef, self).__eq__(other):
            return False
        return self.label == other.label


# ---------------------------------------
# Attribute definitions should hold value
# ---------------------------------------

class UnknownDef(AbstractAttrDef):
    """Definition is not known because definition is not available.

    This attribute can be used to keep existing data unchanged but does not
    have known definition of type.
    """

    type = "unknown"

    def __init__(self, key, default=None, **kwargs):
        kwargs["default"] = default
        super(UnknownDef, self).__init__(key, **kwargs)

    def convert_value(self, value):
        return value


class HiddenDef(AbstractAttrDef):
    """Hidden value of Any type.

    This attribute can be used for UI purposes to pass values related
    to other attributes (e.g. in multi-page UIs).

    Keep in mind the value should be possible to parse by json parser.
    """

    type = "hidden"

    def __init__(self, key, default=None, **kwargs):
        kwargs["default"] = default
        kwargs["hidden"] = True
        super(HiddenDef, self).__init__(key, **kwargs)

    def convert_value(self, value):
        return value


class NumberDef(AbstractAttrDef):
    """Number definition.

    Number can have defined minimum/maximum value and decimal points. Value
    is integer if decimals are 0.

    Args:
        minimum(int, float): Minimum possible value.
        maximum(int, float): Maximum possible value.
        decimals(int): Maximum decimal points of value.
        default(int, float): Default value for conversion.
    """

    type = "number"
    type_attributes = [
        "minimum",
        "maximum",
        "decimals"
    ]

    def __init__(
        self, key, minimum=None, maximum=None, decimals=None, default=None,
        **kwargs
    ):
        minimum = 0 if minimum is None else minimum
        maximum = 999999 if maximum is None else maximum
        # Swap min/max when passed in opposite order
        if minimum > maximum:
            maximum, minimum = minimum, maximum

        if default is None:
            default = 0

        elif not isinstance(default, (int, float)):
            raise TypeError((
                "'default' argument must be 'int' or 'float', not '{}'"
            ).format(type(default)))

        # Clamp default value by min/max values
        if default < minimum:
            default = minimum

        elif default > maximum:
            default = maximum

        super(NumberDef, self).__init__(key, default=default, **kwargs)

        self.minimum = minimum
        self.maximum = maximum
        self.decimals = 0 if decimals is None else decimals

    def __eq__(self, other):
        if not super(NumberDef, self).__eq__(other):
            return False

        return (
            self.decimals == other.decimals
            and self.minimum == other.minimum
            and self.maximum == other.maximum
        )

    def convert_value(self, value):
        if isinstance(value, six.string_types):
            try:
                value = float(value)
            except Exception:
                pass

        if not isinstance(value, (int, float)):
            return self.default

        if self.decimals == 0:
            return int(value)
        return round(float(value), self.decimals)
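
To make the conversion rules concrete, a small sketch of `NumberDef.convert_value` behavior as implemented above (note that conversion falls back to the default, it does not clamp to min/max):

```python
samples = NumberDef("samples", minimum=1, maximum=4096, decimals=0, default=128)

samples.convert_value("42.7")  # 42 - string is parsed, decimals == 0 gives int
samples.convert_value(3.14)    # 3
samples.convert_value(None)    # 128 - unconvertible values fall back to default
```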

class TextDef(AbstractAttrDef):
    """Text definition.

    Text can have a multiline option, so endline characters are allowed.
    Regex validation can be applied, and a UI placeholder and a default
    value can be set.

    Regex validation is not part of attribute implementation.

    Args:
        multiline(bool): Text has single or multiline support.
        regex(str, re.Pattern): Regex validation.
        placeholder(str): UI placeholder for attribute.
        default(str, None): Default value. Empty string used when not defined.
    """

    type = "text"
    type_attributes = [
        "multiline",
        "placeholder",
    ]

    def __init__(
        self, key, multiline=None, regex=None, placeholder=None, default=None,
        **kwargs
    ):
        if default is None:
            default = ""

        elif not isinstance(default, six.string_types):
            raise TypeError((
                "'default' argument must be a {}, not '{}'"
            ).format(six.string_types, type(default)))

        super(TextDef, self).__init__(key, default=default, **kwargs)

        if multiline is None:
            multiline = False

        if isinstance(regex, six.string_types):
            regex = re.compile(regex)

        self.multiline = multiline
        self.placeholder = placeholder
        self.regex = regex

    def __eq__(self, other):
        if not super(TextDef, self).__eq__(other):
            return False

        return (
            self.multiline == other.multiline
            and self.regex == other.regex
        )

    def convert_value(self, value):
        if isinstance(value, six.string_types):
            return value
        return self.default

    def serialize(self):
        data = super(TextDef, self).serialize()
        data["regex"] = self.regex.pattern if self.regex is not None else None
        return data


class EnumDef(AbstractAttrDef):
    """Enumeration of items.

    Enumeration of a single item from items, or a list of items if
    multiselection is enabled.

    Args:
        items (Union[List[str], List[Dict[str, Any]]]): Items definition that
            can be converted using 'prepare_enum_items'.
        default (Optional[Any]): Default value. Must be one key (value) from
            passed items, or a list of values for multiselection.
        multiselection (Optional[bool]): If True, multiselection is allowed.
            Output is list of selected items.
    """

    type = "enum"

    def __init__(
        self, key, items, default=None, multiselection=False, **kwargs
    ):
        if not items:
            raise ValueError((
                "Empty 'items' value. {} must have"
                " defined values on initialization."
            ).format(self.__class__.__name__))

        items = self.prepare_enum_items(items)
        item_values = [item["value"] for item in items]
        item_values_set = set(item_values)
        if multiselection:
            if default is None:
                default = []
            default = list(item_values_set.intersection(default))

        elif default not in item_values:
            default = next(iter(item_values), None)

        super(EnumDef, self).__init__(key, default=default, **kwargs)

        self.items = items
        self._item_values = item_values_set
        self.multiselection = multiselection

    def __eq__(self, other):
        if not super(EnumDef, self).__eq__(other):
            return False

        return (
            self.items == other.items
            and self.multiselection == other.multiselection
        )

    def convert_value(self, value):
        if not self.multiselection:
            if value in self._item_values:
                return value
            return self.default

        if value is None:
            return copy.deepcopy(self.default)
        return list(self._item_values.intersection(value))

    def serialize(self):
        data = super(EnumDef, self).serialize()
        data["items"] = copy.deepcopy(self.items)
        data["multiselection"] = self.multiselection
        return data

    @staticmethod
    def prepare_enum_items(items):
        """Convert items to unified structure.

        Output is a list where each item is dictionary with 'value'
        and 'label'.

        ```python
        # Example output
        [
            {"label": "Option 1", "value": 1},
            {"label": "Option 2", "value": 2},
            {"label": "Option 3", "value": 3}
        ]
        ```

        Args:
            items (Union[Dict[str, Any], List[Any], List[Dict[str, Any]]]):
                The items to convert.

        Returns:
            List[Dict[str, Any]]: Unified structure of items.
        """

        output = []
        if isinstance(items, dict):
            for value, label in items.items():
                output.append({"label": label, "value": value})

        elif isinstance(items, (tuple, list, set)):
            for item in items:
                if isinstance(item, dict):
                    # Validate if 'value' is available
                    if "value" not in item:
                        raise KeyError("Item does not contain 'value' key.")

                    if "label" not in item:
                        item["label"] = str(item["value"])
                elif isinstance(item, (list, tuple)):
                    if len(item) == 2:
                        value, label = item
                    elif len(item) == 1:
                        value = item[0]
                        label = str(value)
                    else:
                        raise ValueError((
                            "Invalid items count {}."
                            " Expected 1 or 2. Value: {}"
                        ).format(len(item), str(item)))

                    item = {"label": label, "value": value}
                else:
                    item = {"label": str(item), "value": item}
                output.append(item)

        else:
            raise TypeError(
                "Unknown type for enum items '{}'".format(type(items))
            )

        return output
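
All accepted input shapes normalize to the same structure; a short sketch of `prepare_enum_items` with each supported form:

```python
# Mapping of value -> label
EnumDef.prepare_enum_items({"low": "Low", "high": "High"})
# Plain values, label is derived via str()
EnumDef.prepare_enum_items(["low", "high"])
# (value, label) pairs
EnumDef.prepare_enum_items([("low", "Low"), ("high", "High")])
# Already-unified dicts, a missing 'label' is filled in
EnumDef.prepare_enum_items([{"value": "low"}])
# Each call returns e.g. [{"label": "Low", "value": "low"}, ...]
```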

class BoolDef(AbstractAttrDef):
    """Boolean representation.

    Args:
        default(bool): Default value. Set to `False` if not defined.
    """

    type = "bool"

    def __init__(self, key, default=None, **kwargs):
        if default is None:
            default = False
        super(BoolDef, self).__init__(key, default=default, **kwargs)

    def convert_value(self, value):
        if isinstance(value, bool):
            return value
        return self.default


class FileDefItem(object):
    def __init__(
        self, directory, filenames, frames=None, template=None
    ):
        self.directory = directory

        self.filenames = []
        self.is_sequence = False
        self.template = None
        self.frames = []
        self.is_empty = True

        self.set_filenames(filenames, frames, template)

    def __str__(self):
        return json.dumps(self.to_dict())

    def __repr__(self):
        if self.is_empty:
            filename = "< empty >"
        elif self.is_sequence:
            filename = self.template
        else:
            filename = self.filenames[0]

        return "<{}: \"{}\">".format(
            self.__class__.__name__,
            os.path.join(self.directory, filename)
        )

    @property
    def label(self):
        if self.is_empty:
            return None

        if not self.is_sequence:
            return self.filenames[0]

        frame_start = self.frames[0]
        filename_template = os.path.basename(self.template)
        if len(self.frames) == 1:
            return "{} [{}]".format(filename_template, frame_start)

        frame_end = self.frames[-1]
        expected_len = (frame_end - frame_start) + 1
        if expected_len == len(self.frames):
            return "{} [{}-{}]".format(
                filename_template, frame_start, frame_end
            )

        ranges = []
        _frame_start = None
        _frame_end = None
        for frame in range(frame_start, frame_end + 1):
            if frame not in self.frames:
                add_to_ranges = _frame_start is not None
            elif _frame_start is None:
                _frame_start = _frame_end = frame
                add_to_ranges = frame == frame_end
            else:
                _frame_end = frame
                add_to_ranges = frame == frame_end

            if add_to_ranges:
                if _frame_start != _frame_end:
                    _range = "{}-{}".format(_frame_start, _frame_end)
                else:
                    _range = str(_frame_start)
                ranges.append(_range)
                _frame_start = _frame_end = None
        return "{} [{}]".format(
            filename_template, ",".join(ranges)
        )

    def split_sequence(self):
        if not self.is_sequence:
            raise ValueError("Cannot split single file item")

        paths = [
            os.path.join(self.directory, filename)
            for filename in self.filenames
        ]
        return self.from_paths(paths, False)

    @property
    def ext(self):
        if self.is_empty:
            return None
        _, ext = os.path.splitext(self.filenames[0])
        if ext:
            return ext
        return None

    @property
    def lower_ext(self):
        ext = self.ext
        if ext is not None:
            return ext.lower()
        return ext

    @property
    def is_dir(self):
        if self.is_empty:
            return False

        # QUESTION a better way how to define folder (in init argument?)
        if self.ext:
            return False
        return True

    def set_directory(self, directory):
        self.directory = directory

    def set_filenames(self, filenames, frames=None, template=None):
        if frames is None:
            frames = []
        is_sequence = False
        if frames:
            is_sequence = True

        if is_sequence and not template:
            raise ValueError("Missing template for sequence")

        self.is_empty = len(filenames) == 0
        self.filenames = filenames
        self.template = template
        self.frames = frames
        self.is_sequence = is_sequence

    @classmethod
    def create_empty_item(cls):
        return cls("", "")

    @classmethod
    def from_value(cls, value, allow_sequences):
        """Convert passed value to FileDefItem objects.

        Returns:
            list: Created FileDefItem objects.
        """

        # Convert single item to iterable
        if not isinstance(value, (list, tuple, set)):
            value = [value]

        output = []
        str_filepaths = []
        for item in value:
            if isinstance(item, dict):
                item = cls.from_dict(item)

            if isinstance(item, FileDefItem):
                if not allow_sequences and item.is_sequence:
                    output.extend(item.split_sequence())
                else:
                    output.append(item)

            elif isinstance(item, six.string_types):
                str_filepaths.append(item)
            else:
                raise TypeError(
                    "Unknown type \"{}\". Can't convert to {}".format(
                        str(type(item)), cls.__name__
                    )
                )

        if str_filepaths:
            output.extend(cls.from_paths(str_filepaths, allow_sequences))

        return output

    @classmethod
    def from_dict(cls, data):
        return cls(
            data["directory"],
            data["filenames"],
            data.get("frames"),
            data.get("template")
        )

    @classmethod
    def from_paths(cls, paths, allow_sequences):
        filenames_by_dir = collections.defaultdict(list)
        for path in paths:
            normalized = os.path.normpath(path)
            directory, filename = os.path.split(normalized)
            filenames_by_dir[directory].append(filename)

        output = []
        for directory, filenames in filenames_by_dir.items():
            if allow_sequences:
                cols, remainders = clique.assemble(filenames)
            else:
                cols = []
                remainders = filenames

            for remainder in remainders:
                output.append(cls(directory, [remainder]))

            for col in cols:
                frames = list(col.indexes)
                paths = [filename for filename in col]
                template = col.format("{head}{padding}{tail}")

                output.append(cls(
                    directory, paths, frames, template
                ))

        return output

    def to_dict(self):
        output = {
            "is_sequence": self.is_sequence,
            "directory": self.directory,
            "filenames": list(self.filenames),
        }
        if self.is_sequence:
            output.update({
                "template": self.template,
                "frames": list(sorted(self.frames)),
            })

        return output
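
A sketch of how `from_paths` groups a frame sequence (the grouping is done by the `clique` library, so the exact template string depends on its output; paths here are made up):

```python
items = FileDefItem.from_paths(
    ["/renders/shot010.1001.exr", "/renders/shot010.1002.exr"],
    allow_sequences=True,
)

item = items[0]
item.is_sequence  # True - both frames collapsed into one item
item.frames       # [1001, 1002]
item.label        # e.g. 'shot010.%04d.exr [1001-1002]'
```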

class FileDef(AbstractAttrDef):
    """File definition.

    It is possible to define filters of allowed file extensions and whether
    folders are supported.

    Args:
        single_item(bool): Allow only single path item.
        folders(bool): Allow folder paths.
        extensions(List[str]): Allow files with extensions. Empty list will
            allow all extensions and None will disable files completely.
        extensions_label(str): Custom label shown instead of extensions in UI.
        default(str, List[str]): Default value.
    """

    type = "path"
    type_attributes = [
        "single_item",
        "folders",
        "extensions",
        "allow_sequences",
        "extensions_label",
    ]

    def __init__(
        self, key, single_item=True, folders=None, extensions=None,
        allow_sequences=True, extensions_label=None, default=None, **kwargs
    ):
        if folders is None and extensions is None:
            folders = True
            extensions = []

        if default is None:
            if single_item:
                default = FileDefItem.create_empty_item().to_dict()
            else:
                default = []
        else:
            if single_item:
                if isinstance(default, dict):
                    FileDefItem.from_dict(default)

                elif isinstance(default, six.string_types):
                    default = FileDefItem.from_paths(
                        [default.strip()], allow_sequences
                    )[0]

                else:
                    raise TypeError((
                        "'default' argument must be 'str' or 'dict' not '{}'"
                    ).format(type(default)))

            else:
                if not isinstance(default, (tuple, list, set)):
                    raise TypeError((
                        "'default' argument must be 'list', 'tuple' or 'set'"
                        ", not '{}'"
                    ).format(type(default)))

        # Change horizontal label
        is_label_horizontal = kwargs.get("is_label_horizontal")
        if is_label_horizontal is None:
            kwargs["is_label_horizontal"] = False

        self.single_item = single_item
        self.folders = folders
        self.extensions = set(extensions)
        self.allow_sequences = allow_sequences
        self.extensions_label = extensions_label
        super(FileDef, self).__init__(key, default=default, **kwargs)

    def __eq__(self, other):
        if not super(FileDef, self).__eq__(other):
            return False

        return (
            self.single_item == other.single_item
            and self.folders == other.folders
            and self.extensions == other.extensions
            and self.allow_sequences == other.allow_sequences
        )

    def convert_value(self, value):
        if isinstance(value, six.string_types) or isinstance(value, dict):
            value = [value]

        if isinstance(value, (tuple, list, set)):
            string_paths = []
            dict_items = []
            for item in value:
                if isinstance(item, six.string_types):
                    string_paths.append(item.strip())
                elif isinstance(item, dict):
                    try:
                        FileDefItem.from_dict(item)
                        dict_items.append(item)
                    except (ValueError, KeyError):
                        pass

            if string_paths:
                file_items = FileDefItem.from_paths(
                    string_paths, self.allow_sequences
                )
                dict_items.extend([
                    file_item.to_dict()
                    for file_item in file_items
                ])

            if not self.single_item:
                return dict_items

            if not dict_items:
                return self.default
            return dict_items[0]

        if self.single_item:
            return FileDefItem.create_empty_item().to_dict()
        return []


def serialize_attr_def(attr_def):
    """Serialize attribute definition to data.

    Args:
        attr_def (AbstractAttrDef): Attribute definition to serialize.

    Returns:
        Dict[str, Any]: Serialized data.
    """

    return attr_def.serialize()


def serialize_attr_defs(attr_defs):
    """Serialize attribute definitions to data.

    Args:
        attr_defs (List[AbstractAttrDef]): Attribute definitions to serialize.

    Returns:
        List[Dict[str, Any]]: Serialized data.
    """

    return [
        serialize_attr_def(attr_def)
        for attr_def in attr_defs
    ]


def deserialize_attr_def(attr_def_data):
    """Deserialize attribute definition from data.

    Args:
        attr_def_data (Dict[str, Any]): Attribute definition data to
            deserialize.
    """

    attr_type = attr_def_data.pop("type")
    cls = _attr_defs_by_type[attr_type]
    return cls.deserialize(attr_def_data)


def deserialize_attr_defs(attr_defs_data):
    """Deserialize attribute definitions.

    Args:
        attr_defs_data (List[Dict[str, Any]]): List of attribute definitions.
    """

    return [
        deserialize_attr_def(attr_def_data)
        for attr_def_data in attr_defs_data
    ]


# Register attribute definitions
for _attr_class in (
    UISeparatorDef,
    UILabelDef,
    UnknownDef,
    NumberDef,
    TextDef,
    EnumDef,
    BoolDef,
    FileDef
):
    register_attr_def_class(_attr_class)
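
A round-trip sketch tying the serialization helpers to the registry filled above: the 'type' stored by `serialize` routes `deserialize_attr_def` back to the right class.

```python
original = TextDef("comment", placeholder="Artist note")

data = serialize_attr_def(original)    # plain dict, safe to store as JSON
restored = deserialize_attr_def(data)  # data["type"] == "text" -> TextDef

assert restored == original
```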

38 client/ayon_core/lib/connections.py Normal file

@@ -0,0 +1,38 @@
import requests
import os


def requests_post(*args, **kwargs):
    """Wrap the requests post method.

    Disables SSL certificate validation if the ``OPENPYPE_DONT_VERIFY_SSL``
    environment variable is found. This is useful when a Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

    Warning:
        Disabling SSL certificate validation is defeating one line
        of defense SSL is providing, and it is not recommended.

    """
    if "verify" not in kwargs:
        kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL")
    return requests.post(*args, **kwargs)


def requests_get(*args, **kwargs):
    """Wrap the requests get method.

    Disables SSL certificate validation if the ``OPENPYPE_DONT_VERIFY_SSL``
    environment variable is found. This is useful when a Deadline server is
    running with self-signed certificates and its certificate is not
    added to trusted certificates on client machines.

    Warning:
        Disabling SSL certificate validation is defeating one line
        of defense SSL is providing, and it is not recommended.

    """
    if "verify" not in kwargs:
        kwargs["verify"] = not os.getenv("OPENPYPE_DONT_VERIFY_SSL")
    return requests.get(*args, **kwargs)
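
An illustrative call (the server URL is hypothetical); with the environment variable unset, certificate verification stays enabled:

```python
import os

os.environ.pop("OPENPYPE_DONT_VERIFY_SSL", None)
response = requests_get("https://deadline.example.com:8082/api/jobs")  # verify=True

os.environ["OPENPYPE_DONT_VERIFY_SSL"] = "1"
response = requests_get("https://deadline.example.com:8082/api/jobs")  # verify=False
```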

95 client/ayon_core/lib/dateutils.py Normal file

@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
"""Date and time utility functions."""
import datetime


def get_datetime_data(datetime_obj=None):
    """Returns current datetime data as dictionary.

    Args:
        datetime_obj (datetime): Specific datetime object

    Returns:
        dict: prepared date & time data

    Available keys:
        "d" - <Day of month number> in shortest possible way.
        "dd" - <Day of month number> with 2 digits.
        "ddd" - <Week day name> shortened week day. e.g.: `Mon`, ...
        "dddd" - <Week day name> full name of week day. e.g.: `Monday`, ...
        "m" - <Month number> in shortest possible way. e.g.: `1` if January
        "mm" - <Month number> with 2 digits.
        "mmm" - <Month name> shortened month name. e.g.: `Jan`, ...
        "mmmm" - <Month name> full month name. e.g.: `January`, ...
        "yy" - <Year number> shortened year. e.g.: `19`, `20`, ...
        "yyyy" - <Year number> full year. e.g.: `2019`, `2020`, ...
        "H" - <Hours number 24-hour> shortened hours.
        "HH" - <Hours number 24-hour> with 2 digits.
        "h" - <Hours number 12-hour> shortened hours.
        "hh" - <Hours number 12-hour> with 2 digits.
        "ht" - <Midday type> AM or PM.
        "M" - <Minutes number> shortened minutes.
        "MM" - <Minutes number> with 2 digits.
        "S" - <Seconds number> shortened seconds.
        "SS" - <Seconds number> with 2 digits.
    """

    if not datetime_obj:
        datetime_obj = datetime.datetime.now()

    year = datetime_obj.strftime("%Y")

    month = datetime_obj.strftime("%m")
    month_name_full = datetime_obj.strftime("%B")
    month_name_short = datetime_obj.strftime("%b")
    day = datetime_obj.strftime("%d")

    weekday_full = datetime_obj.strftime("%A")
    weekday_short = datetime_obj.strftime("%a")

    hours = datetime_obj.strftime("%H")
    hours_midday = datetime_obj.strftime("%I")
    hour_midday_type = datetime_obj.strftime("%p")
    minutes = datetime_obj.strftime("%M")
    seconds = datetime_obj.strftime("%S")

    return {
        "d": str(int(day)),
        "dd": str(day),
        "ddd": weekday_short,
        "dddd": weekday_full,
        "m": str(int(month)),
        "mm": str(month),
        "mmm": month_name_short,
        "mmmm": month_name_full,
        "yy": str(year[2:]),
        "yyyy": str(year),
        "H": str(int(hours)),
        "HH": str(hours),
        "h": str(int(hours_midday)),
        "hh": str(hours_midday),
        "ht": hour_midday_type,
        "M": str(int(minutes)),
        "MM": str(minutes),
        "S": str(int(seconds)),
        "SS": str(seconds),
    }
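
A formatting sketch using a fixed datetime so the output is deterministic (name output assumes an English locale, since `%A`/`%b`/`%p` are locale-dependent):

```python
import datetime

data = get_datetime_data(datetime.datetime(2020, 1, 6, 9, 5, 30))

"{yyyy}-{mm}-{dd} {HH}:{MM}:{SS}".format(**data)  # '2020-01-06 09:05:30'
data["dddd"], data["mmm"], data["ht"]             # ('Monday', 'Jan', 'AM')
```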


def get_timestamp(datetime_obj=None):
    """Get standardized timestamp from datetime object.

    Args:
        datetime_obj (datetime.datetime): Object of datetime. Current time
            is used if not passed.
    """

    if datetime_obj is None:
        datetime_obj = datetime.datetime.now()
    return datetime_obj.strftime(
        "%Y%m%dT%H%M%SZ"
    )


def get_formatted_current_time():
    return get_timestamp()

71 client/ayon_core/lib/env_tools.py Normal file

@@ -0,0 +1,71 @@
import os


def env_value_to_bool(env_key=None, value=None, default=False):
    """Convert environment variable value to boolean.

    The result is based on the value of the environment variable. The value
    is lowercased, so the function is not case sensitive.

    Returns:
        bool: True if the value matches one of ["true", "yes", "1", "on"],
            False if it matches ["false", "no", "0", "off"], otherwise the
            default value is returned.
    """
    if value is None and env_key is None:
        return default

    if value is None:
        value = os.environ.get(env_key)

    if value is not None:
        value = str(value).lower()
        if value in ("true", "yes", "1", "on"):
            return True
        elif value in ("false", "no", "0", "off"):
            return False
    return default
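
A quick behavior sketch (the variable names are made up for the example):

```python
import os

os.environ["AYON_EXAMPLE_FLAG"] = "YES"
env_value_to_bool("AYON_EXAMPLE_FLAG")              # True - matching is case insensitive
env_value_to_bool(value="off")                      # False - explicit value skips the env
env_value_to_bool("AYON_UNSET_FLAG", default=True)  # True - fallback to default
```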


def get_paths_from_environ(env_key=None, env_value=None, return_first=False):
    """Return existing paths from specific environment variable.

    Args:
        env_key (str): Environment variable in which to look for paths.
        env_value (str): Value of environment variable. Argument `env_key` is
            skipped if this argument is entered.
        return_first (bool): Return first found value or return list of found
            paths. `None` or empty list returned if nothing found.

    Returns:
        str, list, None: Result of found path/s.
    """
    existing_paths = []
    if not env_key and not env_value:
        if return_first:
            return None
        return existing_paths

    if env_value is None:
        env_value = os.environ.get(env_key) or ""

    path_items = env_value.split(os.pathsep)
    for path in path_items:
        # Skip empty string
        if not path:
            continue
        # Normalize path
        path = os.path.normpath(path)
        # Check if path exists
        if os.path.exists(path):
            # Return path if `return_first` is set to True
            if return_first:
                return path
            # Store path
            existing_paths.append(path)

    # Return None if none of the paths exists
    if return_first:
        return None
    # Return all existing paths from environment variable
    return existing_paths
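
And a matching sketch for path collection (the variable name is made up; only paths that exist on disk are returned):

```python
import os

os.environ["EXAMPLE_PLUGIN_PATHS"] = os.pathsep.join(
    [os.getcwd(), "/definitely/missing/dir"]
)

get_paths_from_environ("EXAMPLE_PLUGIN_PATHS")                     # [<cwd>]
get_paths_from_environ("EXAMPLE_PLUGIN_PATHS", return_first=True)  # <cwd>
```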

719 client/ayon_core/lib/events.py Normal file

@@ -0,0 +1,719 @@
"""Events holding data about specific event."""
import os
import re
import copy
import inspect
import collections
import logging
import weakref
from uuid import uuid4

from .python_2_comp import WeakMethod
from .python_module_tools import is_func_signature_supported


class MissingEventSystem(Exception):
    pass


def _get_func_ref(func):
    if inspect.ismethod(func):
        return WeakMethod(func)
    return weakref.ref(func)


def _get_func_info(func):
    path = "<unknown path>"
    if func is None:
        return "<unknown>", path

    if hasattr(func, "__name__"):
        name = func.__name__
    else:
        name = str(func)

    # Get path to file and fallback to '<unknown path>' if it fails
    # NOTE This was added because of 'partial' functions, which are handled,
    #   but who knows what else can cause this to fail?
    try:
        path = os.path.abspath(inspect.getfile(func))
    except TypeError:
        pass

    return name, path


class weakref_partial:
    """Partial function with weak reference to the wrapped function.

    Can be used like 'functools.partial', but it stores only a weak
    reference to the function. That means the function must be referenced
    elsewhere to avoid it being garbage collected.

    When the referenced function is garbage collected, calling the
    weakref partial (no matter the args/kwargs passed) will do nothing.
    It will fail silently, returning `None`. The `is_valid()` method can
    be used to detect whether the reference is still valid.

    This is useful for object methods. In that case the callback is
    deregistered when the object is destroyed.

    Warnings:
        Values passed as *args and **kwargs are stored strongly in memory.
        That may "keep alive" objects that should already be destroyed.
        It is recommended to pass only immutable objects like 'str',
        'bool', 'int' etc.

    Args:
        func (Callable): Function to wrap.
        *args: Arguments passed to the wrapped function.
        **kwargs: Keyword arguments passed to the wrapped function.
    """

    def __init__(self, func, *args, **kwargs):
        self._func_ref = _get_func_ref(func)
        self._args = args
        self._kwargs = kwargs

    def __call__(self, *args, **kwargs):
        func = self._func_ref()
        if func is None:
            return

        new_args = tuple(list(self._args) + list(args))
        new_kwargs = dict(self._kwargs)
        new_kwargs.update(kwargs)
        return func(*new_args, **new_kwargs)

    def get_func(self):
        """Get wrapped function.

        Returns:
            Union[Callable, None]: Wrapped function or None if it was
                destroyed.
        """

        return self._func_ref()

    def is_valid(self):
        """Check if wrapped function is still valid.

        Returns:
            bool: Is wrapped function still valid.
        """

        return self._func_ref() is not None

    def validate_signature(self, *args, **kwargs):
        """Validate if passed arguments are supported by wrapped function.

        Returns:
            bool: Are passed arguments supported by wrapped function.
        """

        func = self._func_ref()
        if func is None:
            return False

        new_args = tuple(list(self._args) + list(args))
        new_kwargs = dict(self._kwargs)
        new_kwargs.update(kwargs)
        return is_func_signature_supported(
            func, *new_args, **new_kwargs
        )
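
A sketch of the silent-failure behavior (in CPython the weakly referenced method becomes unreachable as soon as its instance is deleted):

```python
class Saver:
    def on_save(self, message):
        print("saved:", message)

saver = Saver()
callback = weakref_partial(saver.on_save)

callback("scene.ma")   # prints 'saved: scene.ma'
callback.is_valid()    # True

del saver              # only the weak reference remains
callback("scene.ma")   # fails silently, returns None
callback.is_valid()    # False
```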


class EventCallback(object):
    """Callback registered to a topic.

    The callback function is registered to a topic. Topic is a string which
    may contain '*' that will be handled as "any characters".

    # Examples:
    - "workfile.save"   Callback will be triggered if the event topic is
                        exactly "workfile.save".
    - "workfile.*"      Callback will be triggered if an event topic starts
                        with "workfile.", so "workfile.save" and
                        "workfile.open" will trigger the callback.
    - "*"               Callback will listen to all events.

    Callback can be function or method. In both cases it should expect one
    or no arguments. When 1 argument is expected then the processed 'Event'
    object is passed in.

    The callbacks are validated against their reference counter, which is
    achieved using the 'weakref' module. That means that the callback must
    be stored in memory somewhere, e.g. lambda functions are not
    supported as valid callbacks.

    You can use 'weakref_partial' functions. In that case the partial object
    is stored in the callback object and the reference counter is checked
    for the wrapped function.

    Args:
        topic (str): Topic which will be listened to.
        func (Callable): Callback to a topic.
        order (Union[int, None]): Order of callback. Lower number means
            higher priority.

    Raises:
        TypeError: When passed function is not a callable object.
    """

    def __init__(self, topic, func, order):
        if not callable(func):
            raise TypeError((
                "Registered callback is not callable. \"{}\""
            ).format(str(func)))

        self._validate_order(order)

        self._log = None
        self._topic = topic
        self._order = order
        self._enabled = True
        # Replace '*' with any character regex and escape rest of text
        # - when callback is registered for '*' topic it will receive all
        #   events
        # - it is possible to register to a partial topic 'my.event.*'
        #   - it will receive all matching event topics
        #     e.g. 'my.event.start' and 'my.event.end'
        topic_regex_str = "^{}$".format(
            ".+".join(
                re.escape(part)
                for part in topic.split("*")
            )
        )
        topic_regex = re.compile(topic_regex_str)
        self._topic_regex = topic_regex

        # Callback function prep
        if isinstance(func, weakref_partial):
            partial_func = func
            (name, path) = _get_func_info(func.get_func())
            func_ref = None
            expect_args = partial_func.validate_signature("fake")
            expect_kwargs = partial_func.validate_signature(event="fake")

        else:
            partial_func = None
            (name, path) = _get_func_info(func)
            # Convert callback into references
            # - deleted functions won't cause crashes
            func_ref = _get_func_ref(func)

            # Get expected arguments from function spec
            # - positional arguments are always preferred
            expect_args = is_func_signature_supported(func, "fake")
            expect_kwargs = is_func_signature_supported(func, event="fake")

        self._func_ref = func_ref
        self._partial_func = partial_func
        self._ref_is_valid = True
        self._expect_args = expect_args
        self._expect_kwargs = expect_kwargs

        self._name = name
        self._path = path

    def __repr__(self):
        return "< {} - {} > {}".format(
            self.__class__.__name__, self._name, self._path
        )

    @property
    def log(self):
        if self._log is None:
            self._log = logging.getLogger(self.__class__.__name__)
        return self._log

    @property
    def is_ref_valid(self):
        """Check if the reference to the callback is still valid.

        Returns:
            bool: Is reference to callback valid.
        """

        self._validate_ref()
        return self._ref_is_valid

    def validate_ref(self):
        """Validate if reference to callback is valid.

        Deprecated:
            Reference is always live-checked with 'is_ref_valid'.
        """

        # Trigger validation by getting 'is_ref_valid'
        _ = self.is_ref_valid

    @property
    def enabled(self):
        """Is callback enabled.

        Returns:
            bool: Is callback enabled.
        """

        return self._enabled

    def set_enabled(self, enabled):
        """Change if callback is enabled.

        Args:
            enabled (bool): Change enabled state of the callback.
        """

        self._enabled = enabled

    def deregister(self):
        """Calling this function will cause the callback to be removed."""

        self._ref_is_valid = False
        self._partial_func = None
        self._func_ref = None

    def get_order(self):
        """Get callback order.

        Returns:
            Union[int, None]: Callback order.
        """

        return self._order

    def set_order(self, order):
        """Change callback order.

        Args:
            order (Union[int, None]): Order of callback. Lower number means
                higher priority.
        """

        self._validate_order(order)
        self._order = order

    order = property(get_order, set_order)

    def topic_matches(self, topic):
        """Check if event topic matches callback's topic.

        Args:
            topic (str): Topic name.

        Returns:
            bool: Topic matches callback's topic.
        """

        return bool(self._topic_regex.match(topic))

    def process_event(self, event):
        """Process event.

        Args:
            event(Event): Event that was triggered.
        """

        # Skip if callback is not enabled
        if not self._enabled:
            return

        # Get reference and skip if it is not available
        callback = self._get_callback()
        if callback is None:
            return

        if not self.topic_matches(event.topic):
            return

        # Try to execute callback
        try:
            if self._expect_args:
                callback(event)

            elif self._expect_kwargs:
                callback(event=event)

            else:
                callback()

        except Exception:
            self.log.warning(
                "Failed to execute event callback {}".format(
                    str(repr(self))
                ),
                exc_info=True
            )

    def _validate_order(self, order):
        if isinstance(order, int):
            return

        raise TypeError(
            "Expected type 'int' got '{}'.".format(str(type(order)))
        )

    def _get_callback(self):
        if self._partial_func is not None:
            return self._partial_func

        if self._func_ref is not None:
            return self._func_ref()
        return None

    def _validate_ref(self):
        if self._ref_is_valid is False:
            return

        if self._func_ref is not None:
            self._ref_is_valid = self._func_ref() is not None

        elif self._partial_func is not None:
            self._ref_is_valid = self._partial_func.is_valid()

        else:
            self._ref_is_valid = False

        if not self._ref_is_valid:
            self._func_ref = None
            self._partial_func = None
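
A sketch of the topic matching described in the docstring (the callback must stay referenced, here via a module-level function):

```python
def _on_workfile_event(event):
    print("workfile event:", event.topic)

callback = EventCallback("workfile.*", _on_workfile_event, order=100)

callback.topic_matches("workfile.save")   # True
callback.topic_matches("launcher.start")  # False
```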
|
||||
|
||||
|
||||
# Inherit from 'object' for Python 2 hosts
|
||||
class Event(object):
|
||||
"""Base event object.
|
||||
|
||||
Can be used for any event because is not specific. Only required argument
|
||||
is topic which defines why event is happening and may be used for
|
||||
filtering.
|
||||
|
||||
Arg:
|
||||
topic (str): Identifier of event.
|
||||
data (Any): Data specific for event. Dictionary is recommended.
|
||||
source (str): Identifier of source.
|
||||
event_system (EventSystem): Event system in which can be event
|
||||
triggered.
|
||||
"""
|
||||
|
||||
_data = {}
|
||||
|
||||
def __init__(self, topic, data=None, source=None, event_system=None):
|
||||
self._id = str(uuid4())
|
||||
self._topic = topic
|
||||
if data is None:
|
||||
data = {}
|
||||
self._data = data
|
||||
self._source = source
|
||||
self._event_system = event_system

    def __getitem__(self, key):
        return self._data[key]

    def get(self, key, *args, **kwargs):
        return self._data.get(key, *args, **kwargs)

    @property
    def id(self):
        return self._id

    @property
    def source(self):
        """Event's source used for triggering callbacks.

        Returns:
            Union[str, None]: Source string or None. Source is optional.
        """

        return self._source

    @property
    def data(self):
        return self._data

    @property
    def topic(self):
        """Event's topic used for triggering callbacks.

        Returns:
            str: Topic string.
        """

        return self._topic

    def emit(self):
        """Emit event and trigger callbacks."""
        if self._event_system is None:
            raise MissingEventSystem(
                "Can't emit event {}. Event system is not set.".format(
                    str(repr(self))
                )
            )
        self._event_system.emit_event(self)

    def to_data(self):
        """Convert Event object to data.

        Returns:
            Dict[str, Any]: Event data.
        """

        return {
            "id": self.id,
            "topic": self.topic,
            "source": self.source,
            "data": copy.deepcopy(self.data)
        }

    @classmethod
    def from_data(cls, event_data, event_system=None):
        """Create event from data.

        Args:
            event_data (Dict[str, Any]): Event data with defined keys. Can be
                created using 'to_data' method.
            event_system (EventSystem): System to which the event belongs.

        Returns:
            Event: Event with attributes from passed data.
        """

        obj = cls(
            event_data["topic"],
            event_data["data"],
            event_data["source"],
            event_system
        )
        obj._id = event_data["id"]
        return obj
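
# Illustrative sketch (not part of the original module): serializing an
# event with 'to_data' and restoring it with 'from_data'. The 'EventSystem'
# used here is defined below in this file.
#
#   system = EventSystem()
#   event = system.create_event("workfile.saved", {"path": "a.ma"}, "editor")
#   payload = event.to_data()
#   restored = Event.from_data(payload, system)
#   assert restored.id == event.id and restored.topic == event.topic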


class EventSystem(object):
    """Encapsulate event handling into an object.

    System wraps registered callbacks and triggered events into a single
    object, so it is possible to create multiple independent systems that
    have their own topics and callbacks.

    Callbacks are stored in order of their registration, but it is possible
    to define the order of callbacks manually using the 'order' argument of
    'add_callback'.
    """

    default_order = 100

    def __init__(self):
        self._registered_callbacks = []

    def add_callback(self, topic, callback, order=None):
        """Register callback in event system.

        Args:
            topic (str): Topic for EventCallback.
            callback (Union[Callable, weakref_partial]): Function or method
                that will be called when topic is triggered.
            order (Optional[int]): Order of callback. Lower number means
                higher priority.

        Returns:
            EventCallback: Created callback object which can be used to
                stop listening.
        """

        if order is None:
            order = self.default_order

        callback = EventCallback(topic, callback, order)
        self._registered_callbacks.append(callback)
        return callback

    def create_event(self, topic, data, source):
        """Create new event which is bound to event system.

        Args:
            topic (str): Event topic.
            data (dict): Data related to event.
            source (str): Source of event.

        Returns:
            Event: Object of event.
        """

        return Event(topic, data, source, self)

    def emit(self, topic, data, source):
        """Create event based on passed data and emit it.

        This is the easiest way to trigger an event in an event system.

        Args:
            topic (str): Event topic.
            data (dict): Data related to event.
            source (str): Source of event.

        Returns:
            Event: Created and emitted event.
        """

        event = self.create_event(topic, data, source)
        event.emit()
        return event

    def emit_event(self, event):
        """Emit event object.

        Args:
            event (Event): Prepared event with topic and data.
        """

        self._process_event(event)

    def _process_event(self, event):
        """Process event topic and trigger callbacks.

        Args:
            event (Event): Prepared event with topic and data.
        """

        callbacks = tuple(sorted(
            self._registered_callbacks, key=lambda x: x.order
        ))
        for callback in callbacks:
            callback.process_event(event)
            if not callback.is_ref_valid:
                self._registered_callbacks.remove(callback)
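
# Illustrative sketch (not part of the original module): registering a
# callback and emitting an event through an 'EventSystem' instance.
#
#   def _on_save(event):
#       print("Saved:", event["path"])
#
#   system = EventSystem()
#   system.add_callback("workfile.saved", _on_save)
#   system.emit("workfile.saved", {"path": "a.ma"}, "editor")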


class QueuedEventSystem(EventSystem):
    """Events are automatically processed in a queue.

    If a callback triggers another event, the new event is not processed
    until all callbacks of the previous event are processed.

    Allows implementing a custom event processing loop by changing
    'auto_execute'.

    Note:
        This probably should be the default behavior of 'EventSystem'.
        Changing it now could cause problems in existing code.

    Args:
        auto_execute (Optional[bool]): If 'True', events are processed
            automatically. A custom loop calling 'process_next_event'
            must be implemented when set to 'False'.
    """

    def __init__(self, auto_execute=True):
        super(QueuedEventSystem, self).__init__()
        self._event_queue = collections.deque()
        self._current_event = None
        self._auto_execute = auto_execute

    def __len__(self):
        return self.count()

    def count(self):
        """Get number of events in queue.

        Returns:
            int: Number of events in queue.
        """

        return len(self._event_queue)

    def process_next_event(self):
        """Process next event in queue.

        Should be used only if 'auto_execute' is set to 'False'. Only a
        single event is processed.

        Returns:
            Union[Event, None]: Processed event.
        """

        if self._current_event is not None:
            raise ValueError("An event is already in progress.")

        if not self._event_queue:
            return None
        event = self._event_queue.popleft()
        self._current_event = event
        self._process_event(event)
        self._current_event = None
        return event

    def emit_event(self, event):
        """Emit event object.

        Args:
            event (Event): Prepared event with topic and data.
        """

        if not self._auto_execute or self._current_event is not None:
            self._event_queue.append(event)
            return

        self._event_queue.append(event)
        while self._event_queue:
            event = self._event_queue.popleft()
            self._current_event = event
            self._process_event(event)
            self._current_event = None
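
# Illustrative sketch (not part of the original module): driving the queue
# manually with 'auto_execute=False'.
#
#   system = QueuedEventSystem(auto_execute=False)
#   system.emit("topic.a", {}, "example")
#   system.emit("topic.b", {}, "example")
#   while system.count():
#       system.process_next_event()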


class GlobalEventSystem:
    """Event system living in global scope of process.

    This is primarily used in host implementations to trigger events
    related to DCC changes or context changes in the host implementation.
    """

    _global_event_system = None

    @classmethod
    def get_global_event_system(cls):
        if cls._global_event_system is None:
            cls._global_event_system = EventSystem()
        return cls._global_event_system

    @classmethod
    def add_callback(cls, topic, callback):
        event_system = cls.get_global_event_system()
        return event_system.add_callback(topic, callback)

    @classmethod
    def emit(cls, topic, data, source):
        event_system = cls.get_global_event_system()
        return event_system.emit(topic, data, source)


def register_event_callback(topic, callback):
    """Add callback that will be executed on specific topic.

    Args:
        topic (str): Topic on which the callback will be triggered.
        callback (function): Callback that will be triggered when the topic
            is triggered. Callback should expect no arguments or a single
            argument to which the `Event` object is passed.

    Returns:
        EventCallback: Object wrapping the callback. It can be used to
            enable/disable listening to a topic or remove the callback from
            the topic completely.
    """

    return GlobalEventSystem.add_callback(topic, callback)


def emit_event(topic, data=None, source=None):
    """Emit event with topic and data.

    Args:
        topic (str): Event's topic.
        data (dict): Event's additional data. Optional.
        source (str): Who emitted the topic. Optional.

    Returns:
        Event: Object of event that was emitted.
    """

    return GlobalEventSystem.emit(topic, data, source)
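
# Illustrative sketch (not part of the original module): the module level
# helpers use the process-wide 'GlobalEventSystem' under the hood.
#
#   register_event_callback("application.launched", lambda e: print(e.data))
#   emit_event("application.launched", {"app": "maya/2024"}, "launcher")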
480
client/ayon_core/lib/execute.py
Normal file
@@ -0,0 +1,480 @@
import os
import sys
import subprocess
import platform
import json
import tempfile

from openpype import AYON_SERVER_ENABLED

from .log import Logger
from .vendor_bin_utils import find_executable

from .openpype_version import is_running_from_build

# MSDN process creation flag (Windows only)
CREATE_NO_WINDOW = 0x08000000


def execute(args,
            silent=False,
            cwd=None,
            env=None,
            shell=None):
    """Execute command as process.

    This will execute given command as process, monitor its output
    and log it appropriately.

    .. seealso::

        :mod:`subprocess` module in Python.

    Args:
        args (list): list of arguments passed to process.
        silent (bool): control output of executed process.
        cwd (str): current working directory for process.
        env (dict): environment variables for process.
        shell (bool): use shell to execute, default is no.

    Returns:
        int: return code of process

    """

    log_levels = ['DEBUG:', 'INFO:', 'ERROR:', 'WARNING:', 'CRITICAL:']

    log = Logger.get_logger('execute')
    log.info("Executing ({})".format(" ".join(args)))
    popen = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
        bufsize=1,
        cwd=cwd,
        env=env or os.environ,
        shell=shell
    )

    # Blocks until finished
    while True:
        line = popen.stdout.readline()
        if line == '':
            break
        if silent:
            continue
        line_test = False
        for test_string in log_levels:
            if line.startswith(test_string):
                line_test = True
                break
        if not line_test:
            print(line[:-1])

    log.info("Execution is finishing up ...")

    popen.wait()
    return popen.returncode
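
# Illustrative sketch (not part of the original module): running a short
# python command through 'execute' and checking its return code.
#
#   import sys
#   return_code = execute([sys.executable, "-c", "print('hello')"])
#   assert return_code == 0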


def run_subprocess(*args, **kwargs):
    """Convenience method for getting output errors for subprocess.

    Output is logged when the process finishes.

    Entered arguments and keyword arguments are passed to subprocess Popen.

    On Windows, 'creationflags' is filled with flags that prevent creation
    of a new window.

    Args:
        *args: Variable length argument list passed to Popen.
        **kwargs: Arbitrary keyword arguments passed to Popen. It is
            possible to pass a `logging.Logger` object under "logger" to
            use a custom logger for output.

    Returns:
        str: Full output of subprocess concatenated stdout and stderr.

    Raises:
        RuntimeError: Exception is raised if process finished with nonzero
            return code.
    """

    # Modify creation flags on windows to hide console window if in UI mode
    if (
        platform.system().lower() == "windows"
        and "creationflags" not in kwargs
        # shell=True already tries to hide the console window
        # and passing these creationflags then shows the window again
        # so we avoid it for shell=True cases
        and kwargs.get("shell") is not True
    ):
        kwargs["creationflags"] = (
            subprocess.CREATE_NEW_PROCESS_GROUP
            | getattr(subprocess, "DETACHED_PROCESS", 0)
            | getattr(subprocess, "CREATE_NO_WINDOW", 0)
        )

    # Get environments from kwarg or use current process environments if
    # they were not passed.
    env = kwargs.get("env") or os.environ
    # Make sure environment contains only strings
    filtered_env = {str(k): str(v) for k, v in env.items()}

    # Use lib's logger if was not passed with kwargs.
    logger = kwargs.pop("logger", None)
    if logger is None:
        logger = Logger.get_logger("run_subprocess")

    # set overrides
    kwargs["stdout"] = kwargs.get("stdout", subprocess.PIPE)
    kwargs["stderr"] = kwargs.get("stderr", subprocess.PIPE)
    kwargs["stdin"] = kwargs.get("stdin", subprocess.PIPE)
    kwargs["env"] = filtered_env

    proc = subprocess.Popen(*args, **kwargs)

    full_output = ""
    _stdout, _stderr = proc.communicate()
    if _stdout:
        _stdout = _stdout.decode("utf-8", errors="backslashreplace")
        full_output += _stdout
        logger.debug(_stdout)

    if _stderr:
        _stderr = _stderr.decode("utf-8", errors="backslashreplace")
        # Add additional line break if output already contains stdout
        if full_output:
            full_output += "\n"
        full_output += _stderr
        logger.info(_stderr)

    if proc.returncode != 0:
        exc_msg = "Executing arguments was not successful: \"{}\"".format(args)
        if _stdout:
            exc_msg += "\n\nOutput:\n{}".format(_stdout)

        if _stderr:
            exc_msg += "\n\nError:\n{}".format(_stderr)

        raise RuntimeError(exc_msg)

    return full_output
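
# Illustrative sketch (not part of the original module): capturing the
# combined output of a subprocess; a nonzero return code raises
# RuntimeError.
#
#   import sys
#   output = run_subprocess([sys.executable, "-c", "print('ok')"])
#   assert "ok" in output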


def clean_envs_for_ayon_process(env=None):
    """Modify environments that may affect ayon-launcher process.

    Main reason to implement this function is to pop PYTHONPATH which may be
    affected by in-host environments.

    Args:
        env (Optional[dict[str, str]]): Environment variables to modify.

    Returns:
        dict[str, str]: Environment variables for ayon process.
    """

    if env is None:
        env = os.environ

    # Exclude some environment variables from a copy of the environment
    env = env.copy()
    for key in ["PYTHONPATH", "PYTHONHOME"]:
        env.pop(key, None)

    return env


def clean_envs_for_openpype_process(env=None):
    """Modify environments that may affect OpenPype process.

    Main reason to implement this function is to pop PYTHONPATH which may be
    affected by in-host environments.
    """

    if AYON_SERVER_ENABLED:
        return clean_envs_for_ayon_process(env=env)

    if env is None:
        env = os.environ

    # Exclude some environment variables from a copy of the environment
    env = env.copy()
    for key in ["PYTHONPATH", "PYTHONHOME"]:
        env.pop(key, None)

    return env


def run_ayon_launcher_process(*args, **kwargs):
    """Execute ayon-launcher process with passed arguments and wait.

    Wrapper for 'run_subprocess' which prepends ayon-launcher executable
    arguments before passed arguments and defines environments if they are
    not passed.

    Values from 'os.environ' are used for environments if they are not
    passed. They are cleaned using the 'clean_envs_for_openpype_process'
    function.

    Example:
        ```
        run_ayon_launcher_process("run", "<path to .py script>")
        ```

    Args:
        *args (str): ayon-launcher cli arguments.
        **kwargs (Any): Keyword arguments for subprocess.Popen.

    Returns:
        str: Full output of subprocess concatenated stdout and stderr.
    """

    args = get_ayon_launcher_args(*args)
    env = kwargs.pop("env", None)
    # Keep env untouched if it is passed and not empty
    if not env:
        # Skip envs that can affect OpenPype process
        # - fill more if you find more
        env = clean_envs_for_openpype_process(os.environ)

    # Only keep OpenPype version if we are running from build.
    if not is_running_from_build():
        env.pop("OPENPYPE_VERSION", None)

    return run_subprocess(args, env=env, **kwargs)


def run_openpype_process(*args, **kwargs):
    """Execute OpenPype process with passed arguments and wait.

    Wrapper for 'run_subprocess' which prepends OpenPype executable
    arguments before passed arguments and defines environments if they are
    not passed.

    Values from 'os.environ' are used for environments if they are not
    passed. They are cleaned using the 'clean_envs_for_openpype_process'
    function.

    Example:
        >>> run_openpype_process("version")

    Args:
        *args (tuple): OpenPype cli arguments.
        **kwargs (dict): Keyword arguments for subprocess.Popen.
    """

    if AYON_SERVER_ENABLED:
        return run_ayon_launcher_process(*args, **kwargs)

    args = get_openpype_execute_args(*args)
    env = kwargs.pop("env", None)
    # Keep env untouched if it is passed and not empty
    if not env:
        # Skip envs that can affect OpenPype process
        # - fill more if you find more
        env = clean_envs_for_openpype_process(os.environ)

    # Only keep OpenPype version if we are running from build.
    if not is_running_from_build():
        env.pop("OPENPYPE_VERSION", None)

    return run_subprocess(args, env=env, **kwargs)


def run_detached_process(args, **kwargs):
    """Execute process with passed arguments as a separate process.

    Values from 'os.environ' are used for environments if they are not
    passed.

    Example:
        >>> run_detached_process(["run", "./path_to.py"])

    Args:
        args (list): OpenPype cli arguments.
        **kwargs (dict): Keyword arguments for subprocess.Popen.

    Returns:
        subprocess.Popen: Pointer to launched process, but it is possible
            that the launched process is already terminated (on Linux).
    """

    env = kwargs.pop("env", None)
    # Keep env untouched if it is passed and not empty
    if not env:
        env = os.environ

    # Create copy of passed env
    kwargs["env"] = {k: v for k, v in env.items()}

    low_platform = platform.system().lower()
    if low_platform == "darwin":
        new_args = ["open", "-na", args.pop(0), "--args"]
        new_args.extend(args)
        args = new_args

    elif low_platform == "windows":
        flags = (
            subprocess.CREATE_NEW_PROCESS_GROUP
            | subprocess.DETACHED_PROCESS
        )
        kwargs["creationflags"] = flags

        if not sys.stdout:
            kwargs["stdout"] = subprocess.DEVNULL
            kwargs["stderr"] = subprocess.DEVNULL

    elif low_platform == "linux" and get_linux_launcher_args() is not None:
        json_data = {
            "args": args,
            "env": kwargs.pop("env")
        }
        json_temp = tempfile.NamedTemporaryFile(
            mode="w", prefix="op_app_args", suffix=".json", delete=False
        )
        json_temp.close()
        json_temp_filepath = json_temp.name
        with open(json_temp_filepath, "w") as stream:
            json.dump(json_data, stream)

        new_args = get_linux_launcher_args()
        new_args.append(json_temp_filepath)

        # Create mid-process which will launch application
        process = subprocess.Popen(new_args, **kwargs)
        # Wait until the process finishes
        # - This is important! The process would stay in "open" state.
        process.wait()
        # Remove the temp file
        os.remove(json_temp_filepath)
        # Return process which is already terminated
        return process

    process = subprocess.Popen(args, **kwargs)
    return process
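
# Illustrative sketch (not part of the original module): launching a tool
# detached from the current process. The executable path is hypothetical,
# and the exact behavior is platform-dependent as described above.
#
#   proc = run_detached_process(["/opt/studio/tool", "--project", "demo"])
#   print("Launched pid:", proc.pid)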


def path_to_subprocess_arg(path):
    """Prepare path for subprocess arguments.

    Returned path can be wrapped with quotes or kept as is.
    """
    return subprocess.list2cmdline([path])


def get_ayon_launcher_args(*args):
    """Arguments to run ayon-launcher process.

    Arguments for a subprocess when a new AYON process must be spawned,
    which may be needed when a new python process for AYON scripts must be
    executed from a build.

    Reasons:
        Ayon-launcher started from code has its executable set to the
        virtual env python and must have the path to the script as the
        first argument, which is not needed for a built application.

    Args:
        *args (str): Any arguments that will be added after executables.

    Returns:
        list[str]: List of arguments to run ayon-launcher process.
    """

    executable = os.environ["AYON_EXECUTABLE"]
    launch_args = [executable]

    executable_filename = os.path.basename(executable)
    if "python" in executable_filename.lower():
        filepath = os.path.join(os.environ["AYON_ROOT"], "start.py")
        launch_args.append(filepath)

    if args:
        launch_args.extend(args)

    return launch_args


def get_openpype_execute_args(*args):
    """Arguments to run pype command.

    Arguments for a subprocess when a new pype process must be spawned,
    which may be needed when a new python process for pype scripts must be
    executed from a built pype.

    ## Why is this needed?
    Pype executed from code has its executable set to the virtual env
    python and must have the path to the script as the first argument,
    which is not needed for a built pype.

    It is possible to pass any arguments that will be added after the pype
    executables.
    """

    if AYON_SERVER_ENABLED:
        return get_ayon_launcher_args(*args)

    executable = os.environ["OPENPYPE_EXECUTABLE"]
    launch_args = [executable]

    executable_filename = os.path.basename(executable)
    if "python" in executable_filename.lower():
        filepath = os.path.join(os.environ["OPENPYPE_ROOT"], "start.py")
        launch_args.append(filepath)

    if args:
        launch_args.extend(args)

    return launch_args


def get_linux_launcher_args(*args):
    """Path to application mid-process executable.

    This function is needed because the arguments differ when used from
    code and from a build.

    It is possible that this function is used in an OpenPype build which
    does not yet have the new executable. In that case 'None' is returned.

    Todos:
        Replace by script in scripts for ayon-launcher.

    Args:
        args (iterable): List of additional arguments added after executable
            argument.

    Returns:
        list: Executables with possible positional argument to script when
            called from code.
    """

    filename = "app_launcher"
    if AYON_SERVER_ENABLED:
        executable = os.environ["AYON_EXECUTABLE"]
    else:
        executable = os.environ["OPENPYPE_EXECUTABLE"]

    executable_filename = os.path.basename(executable)
    if "python" in executable_filename.lower():
        if AYON_SERVER_ENABLED:
            root = os.environ["AYON_ROOT"]
        else:
            root = os.environ["OPENPYPE_ROOT"]
        script_path = os.path.join(root, "{}.py".format(filename))
        launch_args = [executable, script_path]
    else:
        new_executable = os.path.join(
            os.path.dirname(executable),
            filename
        )
        executable_path = find_executable(new_executable)
        if executable_path is None:
            return None
        launch_args = [executable_path]

    if args:
        launch_args.extend(args)

    return launch_args
214
client/ayon_core/lib/file_transaction.py
Normal file
@@ -0,0 +1,214 @@
import os
import logging
import sys
import errno
import six

from openpype.lib import create_hard_link

# this is needed until speedcopy for linux is fixed
if sys.platform == "win32":
    from speedcopy import copyfile
else:
    from shutil import copyfile


class DuplicateDestinationError(ValueError):
    """Error raised when transfer destination already exists in queue.

    The error is only raised if `allow_queue_replacements` is False on the
    FileTransaction instance and the added file to transfer is of a different
    src file than the one already detected in the queue.

    """


class FileTransaction(object):
    """File transaction with rollback options.

    The file transaction is a three-step process.

    1) Rename any existing files to a "temporary backup" during `process()`
    2) Copy the files to final destination during `process()`
    3) Remove any backed up files (*no rollback possible!) during `finalize()`

    Step 3 is done during `finalize()`. If it is not called, the .bak files
    will remain on disk.

    These steps try to ensure that we don't overwrite half of any existing
    files e.g. if they are currently in use.

    Note:
        A regular filesystem is *not* a transactional file system and even
        though this implementation tries to produce a 'safe copy' with a
        potential rollback, do keep in mind that it's inherently unsafe due
        to how filesystems work and a myriad of things could happen during
        the transaction that break the logic. A file storage could go down,
        permissions could be changed, other machines could be moving or
        writing files. A lot can happen.

    Warning:
        Any folders created during the transfer will not be removed.
    """

    MODE_COPY = 0
    MODE_HARDLINK = 1

    def __init__(self, log=None, allow_queue_replacements=False):
        if log is None:
            log = logging.getLogger("FileTransaction")

        self.log = log

        # The transfer queue
        # todo: make this an actual FIFO queue?
        self._transfers = {}

        # Destination file paths that a file was transferred to
        self._transferred = []

        # Backup file location mapping to original locations
        self._backup_to_original = {}

        self._allow_queue_replacements = allow_queue_replacements

    def add(self, src, dst, mode=MODE_COPY):
        """Add a new file to transfer queue.

        Args:
            src (str): Source path.
            dst (str): Destination path.
            mode (int): Transfer mode, MODE_COPY or MODE_HARDLINK.
        """

        opts = {"mode": mode}

        src = os.path.normpath(os.path.abspath(src))
        dst = os.path.normpath(os.path.abspath(dst))

        if dst in self._transfers:
            queued_src = self._transfers[dst][0]
            if src == queued_src:
                self.log.debug(
                    "File transfer was already in queue: {} -> {}".format(
                        src, dst))
                return
            else:
                if not self._allow_queue_replacements:
                    raise DuplicateDestinationError(
                        "Transfer to destination is already in queue: "
                        "{} -> {}. It's not allowed to be replaced by "
                        "a new transfer from {}".format(
                            queued_src, dst, src
                        ))

                self.log.warning("File transfer in queue was replaced.")
                self.log.debug(
                    "Removed from queue: {} -> {} replaced by {} -> {}".format(
                        queued_src, dst, src, dst))

        self._transfers[dst] = (src, opts)

    def process(self):
        # Backup any existing files
        for dst, (src, _) in self._transfers.items():
            self.log.debug("Checking file ... {} -> {}".format(src, dst))
            path_same = self._same_paths(src, dst)
            if path_same or not os.path.exists(dst):
                continue

            # Backup original file
            # todo: add timestamp or uuid to ensure unique
            backup = dst + ".bak"
            self._backup_to_original[backup] = dst
            self.log.debug(
                "Backup existing file: {} -> {}".format(dst, backup))
            os.rename(dst, backup)

        # Copy the files to transfer
        for dst, (src, opts) in self._transfers.items():
            path_same = self._same_paths(src, dst)
            if path_same:
                self.log.debug(
                    "Source and destination are same files {} -> {}".format(
                        src, dst))
                continue

            self._create_folder_for_file(dst)

            if opts["mode"] == self.MODE_COPY:
                self.log.debug("Copying file ... {} -> {}".format(src, dst))
                copyfile(src, dst)
            elif opts["mode"] == self.MODE_HARDLINK:
                self.log.debug("Hardlinking file ... {} -> {}".format(
                    src, dst))
                create_hard_link(src, dst)

            self._transferred.append(dst)

    def finalize(self):
        # Delete any backed up files
        for backup in self._backup_to_original.keys():
            try:
                os.remove(backup)
            except OSError:
                self.log.error(
                    "Failed to remove backup file: {}".format(backup),
                    exc_info=True)

    def rollback(self):
        errors = 0
        # Rollback any transferred files
        for path in self._transferred:
            try:
                os.remove(path)
            except OSError:
                errors += 1
                self.log.error(
                    "Failed to rollback created file: {}".format(path),
                    exc_info=True)

        # Rollback the backups
        for backup, original in self._backup_to_original.items():
            try:
                os.rename(backup, original)
            except OSError:
                errors += 1
                self.log.error(
                    "Failed to restore original file: {} -> {}".format(
                        backup, original),
                    exc_info=True)

        if errors:
            self.log.error(
                "{} errors occurred during rollback.".format(errors),
                exc_info=True)
            six.reraise(*sys.exc_info())

    @property
    def transferred(self):
        """Return the processed transfers destination paths"""
        return list(self._transferred)

    @property
    def backups(self):
        """Return the backup file paths"""
        return list(self._backup_to_original.keys())

    def _create_folder_for_file(self, path):
        dirname = os.path.dirname(path)
        try:
            os.makedirs(dirname)
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                self.log.critical("An unexpected error occurred.")
                six.reraise(*sys.exc_info())

    def _same_paths(self, src, dst):
        # handles same paths but with C:/project vs c:/project
        if os.path.exists(src) and os.path.exists(dst):
            return os.stat(src) == os.stat(dst)

        return src == dst
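
# Illustrative sketch (not part of the original module): a typical
# transaction flow with rollback on failure. Paths are hypothetical.
#
#   transaction = FileTransaction()
#   transaction.add("/tmp/render.exr", "/projects/demo/render.exr")
#   try:
#       transaction.process()
#   except Exception:
#       transaction.rollback()
#       raise
#   transaction.finalize()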
643
client/ayon_core/lib/local_settings.py
Normal file
@@ -0,0 +1,643 @@
# -*- coding: utf-8 -*-
"""Package to deal with saving and retrieving user specific settings."""
import os
import json
import getpass
import platform
from datetime import datetime
from abc import ABCMeta, abstractmethod

# TODO Use pype igniter logic instead of using duplicated code
# disable lru cache in Python 2
try:
    from functools import lru_cache
except ImportError:
    def lru_cache(maxsize):
        def max_size(func):
            def wrapper(*args, **kwargs):
                value = func(*args, **kwargs)
                return value
            return wrapper
        return max_size

# ConfigParser was renamed in python3 to configparser
try:
    import configparser
except ImportError:
    import ConfigParser as configparser

import six
import appdirs

from openpype import AYON_SERVER_ENABLED
from openpype.settings import (
    get_local_settings,
    get_system_settings
)

from openpype.client.mongo import validate_mongo_connection
from openpype.client import get_ayon_server_api_connection

_PLACEHOLDER = object()


class OpenPypeSecureRegistry:
    """Store information using keyring.

    Registry should be used for private data that should be available only
    to the user.

    All passed registry names are prefixed with `OpenPype/` to make it
    easier to identify which data were created by OpenPype.

    Args:
        name(str): Name of registry used as identifier for data.
    """
    def __init__(self, name):
        try:
            import keyring

        except Exception:
            raise NotImplementedError(
                "Python module `keyring` is not available."
            )

        # hack for cx_freeze and Windows keyring backend
        if platform.system().lower() == "windows":
            from keyring.backends import Windows

            keyring.set_keyring(Windows.WinVaultKeyring())

        # Force "OpenPype" prefix
        self._name = "/".join(("OpenPype", name))

    def set_item(self, name, value):
        # type: (str, str) -> None
        """Set sensitive item into system's keyring.

        This uses `Keyring module`_ to save sensitive stuff into system's
        keyring.

        Args:
            name (str): Name of the item.
            value (str): Value of the item.

        .. _Keyring module:
            https://github.com/jaraco/keyring

        """
        import keyring

        keyring.set_password(self._name, name, value)

    @lru_cache(maxsize=32)
    def get_item(self, name, default=_PLACEHOLDER):
        """Get value of sensitive item from system's keyring.

        See also `Keyring module`_

        Args:
            name (str): Name of the item.
            default (Any): Default value if item is not available.

        Returns:
            value (str): Value of the item.

        Raises:
            ValueError: If item doesn't exist and default is not defined.

        .. _Keyring module:
            https://github.com/jaraco/keyring

        """
        import keyring

        value = keyring.get_password(self._name, name)
        if value is not None:
            return value

        if default is not _PLACEHOLDER:
            return default

        # NOTE Should raise `KeyError`
        raise ValueError(
            "Item {}:{} does not exist in keyring.".format(self._name, name)
        )

    def delete_item(self, name):
        # type: (str) -> None
        """Delete value stored in system's keyring.

        See also `Keyring module`_

        Args:
            name (str): Name of the item to be deleted.

        .. _Keyring module:
            https://github.com/jaraco/keyring

        """
        import keyring

        self.get_item.cache_clear()
        keyring.delete_password(self._name, name)
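
# Illustrative sketch (not part of the original module): storing and
# reading a secret through the keyring-backed registry. Requires the
# 'keyring' module; the item name and value here are hypothetical.
#
#   registry = OpenPypeSecureRegistry("mongodb")
#   registry.set_item("openPypeMongo", "mongodb://localhost:27017")
#   assert registry.get_item("openPypeMongo", None) is not None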


@six.add_metaclass(ABCMeta)
class ASettingRegistry():
    """Abstract class defining structure of **SettingRegistry** class.

    It implements methods to store secure items into keyring, otherwise a
    mechanism for storing common items must be implemented in abstract
    methods.

    Attributes:
        _name (str): Registry names.

    """

    def __init__(self, name):
        # type: (str) -> ASettingRegistry
        super(ASettingRegistry, self).__init__()

        self._name = name
        self._items = {}

    def set_item(self, name, value):
        # type: (str, str) -> None
        """Set item to settings registry.

        Args:
            name (str): Name of the item.
            value (str): Value of the item.

        """
        self._set_item(name, value)

    @abstractmethod
    def _set_item(self, name, value):
        # type: (str, str) -> None
        # Implement it
        pass

    def __setitem__(self, name, value):
        self._items[name] = value
        self._set_item(name, value)

    def get_item(self, name):
        # type: (str) -> str
        """Get item from settings registry.

        Args:
            name (str): Name of the item.

        Returns:
            value (str): Value of the item.

        Raises:
            ValueError: If item doesn't exist.

        """
        return self._get_item(name)

    @abstractmethod
    def _get_item(self, name):
        # type: (str) -> str
        # Implement it
        pass

    def __getitem__(self, name):
        return self._get_item(name)

    def delete_item(self, name):
        # type: (str) -> None
        """Delete item from settings registry.

        Args:
            name (str): Name of the item.

        """
        self._delete_item(name)

    @abstractmethod
    def _delete_item(self, name):
        # type: (str) -> None
        """Delete item from settings.

        Note:
            see :meth:`openpype.lib.user_settings.ARegistrySettings.delete_item`

        """
        pass

    def __delitem__(self, name):
        del self._items[name]
        self._delete_item(name)


class IniSettingRegistry(ASettingRegistry):
    """Class using :mod:`configparser`.

    This class is using :mod:`configparser` (ini) files to store items.

    """

    def __init__(self, name, path):
        # type: (str, str) -> IniSettingRegistry
        super(IniSettingRegistry, self).__init__(name)
        # get registry file
        version = os.getenv("OPENPYPE_VERSION", "N/A")
        self._registry_file = os.path.join(path, "{}.ini".format(name))
        if not os.path.exists(self._registry_file):
            with open(self._registry_file, mode="w") as cfg:
                print("# Settings registry", file=cfg)
                print("# Generated by OpenPype {}".format(version), file=cfg)
                now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
                print("# {}".format(now), file=cfg)

    def set_item_section(
            self, section, name, value):
        # type: (str, str, str) -> None
        """Set item to specific section of ini registry.

        If section doesn't exist, it is created.

        Args:
            section (str): Name of section.
            name (str): Name of the item.
            value (str): Value of the item.

        """
        value = str(value)
        config = configparser.ConfigParser()

        config.read(self._registry_file)
        if not config.has_section(section):
            config.add_section(section)
        current = config[section]
        current[name] = value

        with open(self._registry_file, mode="w") as cfg:
            config.write(cfg)

    def _set_item(self, name, value):
        # type: (str, str) -> None
        self.set_item_section("MAIN", name, value)

    def set_item(self, name, value):
        # type: (str, str) -> None
        """Set item to settings ini file.

        This saves the item to the ``MAIN`` section of the ini as each item
        there must reside in some section.

        Args:
            name (str): Name of the item.
            value (str): Value of the item.

        """
        # this does the same, overridden just for a different docstring.
        # we cast value to str as ini options values must be strings.
        super(IniSettingRegistry, self).set_item(name, str(value))

    def get_item(self, name):
        # type: (str) -> str
        """Get item from settings ini file.

        This gets settings from the ``MAIN`` section of the ini file as
        each item there must reside in some section.

        Args:
            name (str): Name of the item.

        Returns:
            str: Value of item.

        Raises:
            ValueError: If value doesn't exist.

        """
        return super(IniSettingRegistry, self).get_item(name)

    @lru_cache(maxsize=32)
    def get_item_from_section(self, section, name):
        # type: (str, str) -> str
        """Get item from section of ini file.

        This will read the ini file and try to get the item value from the
        specified section. If that section or item doesn't exist,
        :exc:`ValueError` is raised.

        Args:
            section (str): Name of ini section.
            name (str): Name of the item.

        Returns:
            str: Item value.

        Raises:
            ValueError: If value doesn't exist.

        """
        config = configparser.ConfigParser()
        config.read(self._registry_file)
        try:
            value = config[section][name]
        except KeyError:
            raise ValueError(
                "Registry doesn't contain value {}:{}".format(section, name))
        return value

    def _get_item(self, name):
        # type: (str) -> str
        return self.get_item_from_section("MAIN", name)

    def delete_item_from_section(self, section, name):
        # type: (str, str) -> None
        """Delete item from section in ini file.

        Args:
            section (str): Section name.
            name (str): Name of the item.

        Raises:
            ValueError: If item doesn't exist.

        """
        self.get_item_from_section.cache_clear()
        config = configparser.ConfigParser()
        config.read(self._registry_file)
        try:
            _ = config[section][name]
        except KeyError:
            raise ValueError(
                "Registry doesn't contain value {}:{}".format(section, name))
        config.remove_option(section, name)

        # if section is empty, delete it
        if len(config[section].keys()) == 0:
            config.remove_section(section)

        with open(self._registry_file, mode="w") as cfg:
            config.write(cfg)

    def _delete_item(self, name):
        """Delete item from default section.

        Note:
            See :meth:`~openpype.lib.IniSettingsRegistry.delete_item_from_section`

        """
        self.delete_item_from_section("MAIN", name)


class JSONSettingRegistry(ASettingRegistry):
    """Class using json file as storage."""

    def __init__(self, name, path):
        # type: (str, str) -> JSONSettingRegistry
        super(JSONSettingRegistry, self).__init__(name)
        #: str: name of registry file
        self._registry_file = os.path.join(path, "{}.json".format(name))
        now = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
        header = {
            "__metadata__": {
                "openpype-version": os.getenv("OPENPYPE_VERSION", "N/A"),
                "generated": now
            },
            "registry": {}
        }

        if not os.path.exists(os.path.dirname(self._registry_file)):
            os.makedirs(os.path.dirname(self._registry_file), exist_ok=True)
        if not os.path.exists(self._registry_file):
            with open(self._registry_file, mode="w") as cfg:
                json.dump(header, cfg, indent=4)

    @lru_cache(maxsize=32)
    def _get_item(self, name):
        # type: (str) -> object
        """Get item value from registry json.

        Note:
            See :meth:`openpype.lib.JSONSettingRegistry.get_item`

        """
        with open(self._registry_file, mode="r") as cfg:
            data = json.load(cfg)
            try:
                value = data["registry"][name]
            except KeyError:
                raise ValueError(
                    "Registry doesn't contain value {}".format(name))
        return value

    def get_item(self, name):
        # type: (str) -> object
        """Get item value from registry json.

        Args:
            name (str): Name of the item.

        Returns:
            value of the item

        Raises:
            ValueError: If item is not found in registry file.

        """
        return self._get_item(name)

    def _set_item(self, name, value):
        # type: (str, object) -> None
        """Set item value to registry json.

        Note:
            See :meth:`openpype.lib.JSONSettingRegistry.set_item`

        """
        with open(self._registry_file, "r+") as cfg:
            data = json.load(cfg)
            data["registry"][name] = value
            cfg.truncate(0)
            cfg.seek(0)
            json.dump(data, cfg, indent=4)

    def set_item(self, name, value):
        # type: (str, object) -> None
        """Set item and its value into json registry file.

        Args:
            name (str): name of the item.
            value (Any): value of the item.

        """
        self._set_item(name, value)

    def _delete_item(self, name):
        # type: (str) -> None
        self._get_item.cache_clear()
        with open(self._registry_file, "r+") as cfg:
            data = json.load(cfg)
            del data["registry"][name]
            cfg.truncate(0)
            cfg.seek(0)
            json.dump(data, cfg, indent=4)


class OpenPypeSettingsRegistry(JSONSettingRegistry):
    """Class handling OpenPype general settings registry.

    Attributes:
        vendor (str): Name used for path construction.
        product (str): Additional name used for path construction.

    """

    def __init__(self, name=None):
        if AYON_SERVER_ENABLED:
            vendor = "Ynput"
            product = "AYON"
            default_name = "AYON_settings"
        else:
            vendor = "pypeclub"
            product = "openpype"
            default_name = "openpype_settings"
        self.vendor = vendor
        self.product = product
        if not name:
            name = default_name
        path = appdirs.user_data_dir(self.product, self.vendor)
        super(OpenPypeSettingsRegistry, self).__init__(name, path)
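
# Illustrative sketch (not part of the original module): persisting a
# simple value in the user-scoped JSON registry. The item name is
# hypothetical.
#
#   registry = OpenPypeSettingsRegistry()
#   registry.set_item("lastProject", "demo")
#   assert registry.get_item("lastProject") == "demo"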


def _create_local_site_id(registry=None):
    """Create a local site identifier."""
    from coolname import generate_slug

    if registry is None:
        registry = OpenPypeSettingsRegistry()

    new_id = generate_slug(3)

    print("Created local site id \"{}\"".format(new_id))

    registry.set_item("localId", new_id)

    return new_id


def get_ayon_appdirs(*args):
    """Local app data directory of AYON client.

    Args:
        *args (Iterable[str]): Subdirectories/files in local app data dir.

    Returns:
        str: Path to directory/file in local app data dir.
    """

    return os.path.join(
        appdirs.user_data_dir("AYON", "Ynput"),
        *args
    )


def _get_ayon_local_site_id():
    # used for background syncing
    site_id = os.environ.get("AYON_SITE_ID")
    if site_id:
        return site_id

    site_id_path = get_ayon_appdirs("site_id")
    if os.path.exists(site_id_path):
        with open(site_id_path, "r") as stream:
            site_id = stream.read()

    if site_id:
        return site_id

    try:
        from ayon_common.utils import get_local_site_id as _get_local_site_id
        site_id = _get_local_site_id()
    except ImportError:
        raise ValueError("Couldn't access local site id")

    return site_id


def get_local_site_id():
    """Get local site identifier.

    Identifier is created if it does not exist yet.
    """

    if AYON_SERVER_ENABLED:
        return _get_ayon_local_site_id()

    # override local id from environment
    # used for background syncing
    if os.environ.get("OPENPYPE_LOCAL_ID"):
        return os.environ["OPENPYPE_LOCAL_ID"]

    registry = OpenPypeSettingsRegistry()
    try:
        return registry.get_item("localId")
    except ValueError:
        return _create_local_site_id()


def change_openpype_mongo_url(new_mongo_url):
    """Change mongo url in pype registry.

    Change of the OpenPype mongo URL requires a restart of running OpenPype
    processes or processes using OpenPype.
    """

    validate_mongo_connection(new_mongo_url)
    key = "openPypeMongo"
    registry = OpenPypeSecureRegistry("mongodb")
    existing_value = registry.get_item(key, None)
    if existing_value is not None:
        registry.delete_item(key)
    registry.set_item(key, new_mongo_url)


def get_openpype_username():
    """OpenPype username used for templates and publishing.

    May be different than the machine's username.

    Returns the "OPENPYPE_USERNAME" environment variable if it is set, then
    tries local settings, and the last option is to use `getpass.getuser()`
    which returns the machine username.
    """

    if AYON_SERVER_ENABLED:
        con = get_ayon_server_api_connection()
        return con.get_user()["name"]

    username = os.environ.get("OPENPYPE_USERNAME")
    if not username:
        local_settings = get_local_settings()
        username = (
            local_settings
            .get("general", {})
            .get("username")
        )
        if not username:
            username = getpass.getuser()
    return username


def is_admin_password_required():
    system_settings = get_system_settings()
    password = system_settings["general"].get("admin_password")
    if not password:
        return False

    local_settings = get_local_settings()
    is_admin = local_settings.get("general", {}).get("is_admin", False)
    if is_admin:
        return False
    return True
494
client/ayon_core/lib/log.py
Normal file
@@ -0,0 +1,494 @@
"""
|
||||
Logging to console and to mongo. For mongo logging, you need to set either
|
||||
``OPENPYPE_LOG_MONGO_URL`` to something like:
|
||||
|
||||
.. example::
|
||||
mongo://user:password@hostname:port/database/collection?authSource=avalon
|
||||
|
||||
or set ``OPENPYPE_LOG_MONGO_HOST`` and other variables.
|
||||
See :func:`_mongo_settings`
|
||||
|
||||
Best place for it is in ``repos/pype-config/environments/global.json``
|
||||
"""
|
||||
|
||||
|
||||
import datetime
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import threading
|
||||
import copy
|
||||
|
||||
from openpype import AYON_SERVER_ENABLED
|
||||
from openpype.client.mongo import (
|
||||
MongoEnvNotSet,
|
||||
get_default_components,
|
||||
OpenPypeMongoConnection,
|
||||
)
|
||||
from . import Terminal
|
||||
|
||||
try:
|
||||
import log4mongo
|
||||
from log4mongo.handlers import MongoHandler
|
||||
except ImportError:
|
||||
log4mongo = None
|
||||
MongoHandler = type("NOT_SET", (), {})
|
||||
|
||||
# Check for `unicode` in builtins
|
||||
USE_UNICODE = hasattr(__builtins__, "unicode")
|
||||
|
||||
|
||||
class LogStreamHandler(logging.StreamHandler):
|
||||
""" StreamHandler class designed to handle utf errors in python 2.x hosts.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, stream=None):
|
||||
super(LogStreamHandler, self).__init__(stream)
|
||||
self.enabled = True
|
||||
|
||||
def enable(self):
|
||||
""" Enable StreamHandler
|
||||
|
||||
Used to silence output
|
||||
"""
|
||||
self.enabled = True
|
||||
|
||||
def disable(self):
|
||||
""" Disable StreamHandler
|
||||
|
||||
Make StreamHandler output again
|
||||
"""
|
||||
self.enabled = False
|
||||
|
||||
def emit(self, record):
|
||||
if not self.enable:
|
||||
return
|
||||
try:
|
||||
msg = self.format(record)
|
||||
msg = Terminal.log(msg)
|
||||
stream = self.stream
|
||||
if stream is None:
|
||||
return
|
||||
fs = "%s\n"
|
||||
# if no unicode support...
|
||||
if not USE_UNICODE:
|
||||
stream.write(fs % msg)
|
||||
else:
|
||||
try:
|
||||
if (isinstance(msg, unicode) and # noqa: F821
|
||||
getattr(stream, 'encoding', None)):
|
||||
ufs = u'%s\n'
|
||||
try:
|
||||
stream.write(ufs % msg)
|
||||
except UnicodeEncodeError:
|
||||
stream.write((ufs % msg).encode(stream.encoding))
|
||||
else:
|
||||
if (getattr(stream, 'encoding', 'utf-8')):
|
||||
ufs = u'%s\n'
|
||||
stream.write(ufs % unicode(msg)) # noqa: F821
|
||||
else:
|
||||
stream.write(fs % msg)
|
||||
except UnicodeError:
|
||||
stream.write(fs % msg.encode("UTF-8"))
|
||||
self.flush()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
|
||||
except OSError:
|
||||
self.handleError(record)
|
||||
|
||||
except Exception:
|
||||
print(repr(record))
|
||||
self.handleError(record)


class LogFormatter(logging.Formatter):

    DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ]'
    default_formatter = logging.Formatter(DFT)

    def __init__(self, formats):
        super(LogFormatter, self).__init__()
        self.formatters = {}
        for loglevel in formats:
            self.formatters[loglevel] = logging.Formatter(formats[loglevel])

    def format(self, record):
        formatter = self.formatters.get(record.levelno, self.default_formatter)

        _exc_info = record.exc_info
        record.exc_info = None

        out = formatter.format(record)
        record.exc_info = _exc_info

        if record.exc_info is not None:
            line_len = len(str(record.exc_info[1]))
            if line_len > 30:
                line_len = 30
            out = "{}\n{}\n{}\n{}\n{}".format(
                out,
                line_len * "=",
                str(record.exc_info[1]),
                line_len * "=",
                self.formatException(record.exc_info)
            )
        return out
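
# Illustrative sketch (not part of the original module): wiring the
# per-level formats into a stream handler, mirroring what
# 'Logger._get_console_handler' below does.
#
#   formatter = LogFormatter(Logger.FORMAT_FILE)
#   handler = LogStreamHandler()
#   handler.setFormatter(formatter)
#   logging.getLogger("example").addHandler(handler)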


class MongoFormatter(logging.Formatter):

    DEFAULT_PROPERTIES = logging.LogRecord(
        '', '', '', '', '', '', '', '').__dict__.keys()

    def format(self, record):
        """Formats LogRecord into python dictionary."""
        # Standard document
        document = {
            'timestamp': datetime.datetime.now(),
            'level': record.levelname,
            'thread': record.thread,
            'threadName': record.threadName,
            'message': record.getMessage(),
            'loggerName': record.name,
            'fileName': record.pathname,
            'module': record.module,
            'method': record.funcName,
            'lineNumber': record.lineno
        }
        document.update(Logger.get_process_data())

        # Standard document decorated with exception info
        if record.exc_info is not None:
            document['exception'] = {
                'message': str(record.exc_info[1]),
                'code': 0,
                'stackTrace': self.formatException(record.exc_info)
            }

        # Standard document decorated with extra contextual information
        if len(self.DEFAULT_PROPERTIES) != len(record.__dict__):
            contextual_extra = set(record.__dict__).difference(
                set(self.DEFAULT_PROPERTIES))
            if contextual_extra:
                for key in contextual_extra:
                    document[key] = record.__dict__[key]
        return document


class Logger:
    DFT = '%(levelname)s >>> { %(name)s }: [ %(message)s ] '
    DBG = " - { %(name)s }: [ %(message)s ] "
    INF = ">>> [ %(message)s ] "
    WRN = "*** WRN: >>> { %(name)s }: [ %(message)s ] "
    ERR = "!!! ERR: %(asctime)s >>> { %(name)s }: [ %(message)s ] "
    CRI = "!!! CRI: %(asctime)s >>> { %(name)s }: [ %(message)s ] "

    FORMAT_FILE = {
        logging.INFO: INF,
        logging.DEBUG: DBG,
        logging.WARNING: WRN,
        logging.ERROR: ERR,
        logging.CRITICAL: CRI,
    }

    # Is static class initialized
    bootstraped = False
    initialized = False
    _init_lock = threading.Lock()

    # Defines if mongo logging should be used
    use_mongo_logging = None
    mongo_process_id = None

    # Backwards compatibility - was used in start.py
    # TODO remove when all old builds are replaced with new one
    # not using 'log_mongo_url_components'
    log_mongo_url_components = None

    # Database name in Mongo
    log_database_name = os.environ.get("OPENPYPE_DATABASE_NAME")
    # Collection name under database in Mongo
    log_collection_name = "logs"

    # Logging level - OPENPYPE_LOG_LEVEL
    log_level = None

    # Data same for all record documents
    process_data = None
    # Cached process name or ability to set different process name
    _process_name = None

    @classmethod
    def get_logger(cls, name=None, _host=None):
        if not cls.initialized:
            cls.initialize()

        logger = logging.getLogger(name or "__main__")

        logger.setLevel(cls.log_level)

        add_mongo_handler = cls.use_mongo_logging
        add_console_handler = True

        for handler in logger.handlers:
            if isinstance(handler, MongoHandler):
                add_mongo_handler = False
            elif isinstance(handler, LogStreamHandler):
                add_console_handler = False

        if add_console_handler:
            logger.addHandler(cls._get_console_handler())

        if add_mongo_handler:
            try:
                handler = cls._get_mongo_handler()
                if handler:
                    logger.addHandler(handler)

            except MongoEnvNotSet:
                # Skip if mongo environments are not set yet
                cls.use_mongo_logging = False

            except Exception:
                lines = traceback.format_exception(*sys.exc_info())
                for line in lines:
                    if line.endswith("\n"):
                        line = line[:-1]
                    Terminal.echo(line)
                cls.use_mongo_logging = False

        # Do not propagate logs to root logger
        logger.propagate = False

        if _host is not None:
            # Warn about deprecated argument
            # TODO remove backwards compatibility of host argument which is
            # not used for more than a year
            logger.warning(
                "Logger \"{}\" is using argument `host` on `get_logger`"
                " which is deprecated. Please remove as backwards"
                " compatibility will be removed soon.".format(logger.name)
            )
        return logger

    @classmethod
    def _get_mongo_handler(cls):
        cls.bootstrap_mongo_log()

        if not cls.use_mongo_logging:
            return

        components = get_default_components()
        kwargs = {
            "host": components["host"],
            "database_name": cls.log_database_name,
            "collection": cls.log_collection_name,
            "username": components["username"],
            "password": components["password"],
            "capped": True,
            "formatter": MongoFormatter()
        }
        if components["port"] is not None:
            kwargs["port"] = int(components["port"])
        if components["auth_db"]:
            kwargs["authentication_db"] = components["auth_db"]

        return MongoHandler(**kwargs)

    @classmethod
    def _get_console_handler(cls):
        formatter = LogFormatter(cls.FORMAT_FILE)
        console_handler = LogStreamHandler()

        console_handler.set_name("LogStreamHandler")
        console_handler.setFormatter(formatter)
        return console_handler

    @classmethod
    def initialize(cls):
        # TODO update already created loggers on re-initialization
        if not cls._init_lock.locked():
            with cls._init_lock:
                cls._initialize()
        else:
            # If lock is locked wait until it is finished
            while cls._init_lock.locked():
                time.sleep(0.1)

    @classmethod
    def _initialize(cls):
        # Change initialization state to prevent runtime changes
        # if it is executed during runtime
        cls.initialized = False
        if not AYON_SERVER_ENABLED:
            cls.log_mongo_url_components = get_default_components()

        # Define if logging to mongo should be used
        if AYON_SERVER_ENABLED:
            use_mongo_logging = False
        else:
            use_mongo_logging = (
                log4mongo is not None
                and os.environ.get("OPENPYPE_LOG_TO_SERVER") == "1"
            )

        # Set mongo id for process (ONLY ONCE)
        if use_mongo_logging and cls.mongo_process_id is None:
            try:
                from bson.objectid import ObjectId
            except Exception:
                use_mongo_logging = False

            # Check if mongo id was passed with environments and pop it
            # - This is for subprocesses that are part of another process,
            #   e.g. the Ftrack event server has 3 other subprocesses that
            #   should use the same mongo id
            if use_mongo_logging:
                mongo_id = os.environ.pop("OPENPYPE_PROCESS_MONGO_ID", None)
                if not mongo_id:
                    # Create new object id
                    mongo_id = ObjectId()
                else:
                    # Convert string to ObjectId object
                    mongo_id = ObjectId(mongo_id)
                cls.mongo_process_id = mongo_id

        # Store result to class definition
        cls.use_mongo_logging = use_mongo_logging

        # Define the logging level
        log_level = os.getenv("OPENPYPE_LOG_LEVEL")
        if not log_level:
# Check OPENPYPE_DEBUG for backwards compatibility
|
||||
op_debug = os.getenv("OPENPYPE_DEBUG")
|
||||
if op_debug and int(op_debug) > 0:
|
||||
log_level = 10
|
||||
else:
|
||||
log_level = 20
|
||||
cls.log_level = int(log_level)
|
||||
|
||||
if not os.environ.get("OPENPYPE_MONGO"):
|
||||
cls.use_mongo_logging = False
|
||||
|
||||
# Mark as initialized
|
||||
cls.initialized = True
|
||||
|
||||
@classmethod
|
||||
def get_process_data(cls):
|
||||
"""Data about current process which should be same for all records.
|
||||
|
||||
Process data are used for each record sent to mongo database.
|
||||
"""
|
||||
if cls.process_data is not None:
|
||||
return copy.deepcopy(cls.process_data)
|
||||
|
||||
if not cls.initialized:
|
||||
cls.initialize()
|
||||
|
||||
host_name = socket.gethostname()
|
||||
try:
|
||||
host_ip = socket.gethostbyname(host_name)
|
||||
except socket.gaierror:
|
||||
host_ip = "127.0.0.1"
|
||||
|
||||
process_name = cls.get_process_name()
|
||||
|
||||
cls.process_data = {
|
||||
"process_id": cls.mongo_process_id,
|
||||
"hostname": host_name,
|
||||
"hostip": host_ip,
|
||||
"username": getpass.getuser(),
|
||||
"system_name": platform.system(),
|
||||
"process_name": process_name
|
||||
}
|
||||
return copy.deepcopy(cls.process_data)
|
||||
|
||||
@classmethod
|
||||
def set_process_name(cls, process_name):
|
||||
"""Set process name for mongo logs."""
|
||||
# Just change the attribute
|
||||
cls._process_name = process_name
|
||||
# Update process data if are already set
|
||||
if cls.process_data is not None:
|
||||
cls.process_data["process_name"] = process_name
|
||||
|
||||
@classmethod
|
||||
def get_process_name(cls):
|
||||
"""Process name that is like "label" of a process.
|
||||
|
||||
OpenPype's logging can be used from OpenPyppe itself of from hosts.
|
||||
Even in OpenPype process it's good to know if logs are from tray or
|
||||
from other cli commands. This should help to identify that information.
|
||||
"""
|
||||
if cls._process_name is not None:
|
||||
return cls._process_name
|
||||
|
||||
# Get process name
|
||||
process_name = os.environ.get("AVALON_APP_NAME")
|
||||
if not process_name:
|
||||
try:
|
||||
import psutil
|
||||
process = psutil.Process(os.getpid())
|
||||
process_name = process.name()
|
||||
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
if not process_name:
|
||||
process_name = os.path.basename(sys.executable)
|
||||
|
||||
cls._process_name = process_name
|
||||
return cls._process_name
|
||||
|
||||
@classmethod
|
||||
def bootstrap_mongo_log(cls):
|
||||
"""Prepare mongo logging."""
|
||||
if cls.bootstraped:
|
||||
return
|
||||
|
||||
if not cls.initialized:
|
||||
cls.initialize()
|
||||
|
||||
if not cls.use_mongo_logging:
|
||||
return
|
||||
|
||||
if not cls.log_database_name:
|
||||
raise ValueError("Database name for logs is not set")
|
||||
|
||||
client = log4mongo.handlers._connection
|
||||
if not client:
|
||||
client = cls.get_log_mongo_connection()
|
||||
# Set the client inside log4mongo handlers to not create another
|
||||
# mongo db connection.
|
||||
log4mongo.handlers._connection = client
|
||||
|
||||
logdb = client[cls.log_database_name]
|
||||
|
||||
collist = logdb.list_collection_names()
|
||||
if cls.log_collection_name not in collist:
|
||||
logdb.create_collection(
|
||||
cls.log_collection_name,
|
||||
capped=True,
|
||||
max=5000,
|
||||
size=1073741824
|
||||
)
|
||||
cls.bootstraped = True
|
||||
|
||||
@classmethod
|
||||
def get_log_mongo_connection(cls):
|
||||
"""Mongo connection that allows to get to log collection.
|
||||
|
||||
This is implemented to prevent multiple connections to mongo from same
|
||||
process.
|
||||
"""
|
||||
if not cls.initialized:
|
||||
cls.initialize()
|
||||
|
||||
return OpenPypeMongoConnection.get_mongo_client()
|
||||
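
# A minimal usage sketch (not part of the module): 'Logger.get_logger' is the
# intended entry point; the import path assumes the package __init__
# re-exports 'Logger', as 'openpype.lib' does.
#
#     from openpype.lib import Logger
#
#     log = Logger.get_logger(__name__)
#     log.info("Something happened")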
302
client/ayon_core/lib/openpype_version.py
Normal file
@@ -0,0 +1,302 @@
"""Lib access to OpenPypeVersion from igniter.
|
||||
|
||||
Access to logic from igniter is available only for OpenPype processes.
|
||||
Is meant to be able check OpenPype versions for studio. The logic is dependent
|
||||
on igniter's inner logic of versions.
|
||||
|
||||
Keep in mind that all functions except 'get_installed_version' does not return
|
||||
OpenPype version located in build but versions available in remote versions
|
||||
repository or locally available.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import openpype.version
|
||||
from openpype import AYON_SERVER_ENABLED
|
||||
|
||||
from .python_module_tools import import_filepath
|
||||
|
||||
|
||||
# ----------------------------------------
|
||||
# Functions independent on OpenPypeVersion
|
||||
# ----------------------------------------
|
||||
def get_openpype_version():
|
||||
"""Version of pype that is currently used."""
|
||||
return openpype.version.__version__
|
||||
|
||||
|
||||
def get_ayon_launcher_version():
|
||||
version_filepath = os.path.join(
|
||||
os.environ["AYON_ROOT"],
|
||||
"version.py"
|
||||
)
|
||||
if not os.path.exists(version_filepath):
|
||||
return None
|
||||
content = {}
|
||||
with open(version_filepath, "r") as stream:
|
||||
exec(stream.read(), content)
|
||||
return content["__version__"]
|
||||
|
||||
|
||||
def get_build_version():
|
||||
"""OpenPype version of build."""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
return get_ayon_launcher_version()
|
||||
|
||||
# Return OpenPype version if is running from code
|
||||
if not is_running_from_build():
|
||||
return get_openpype_version()
|
||||
|
||||
# Import `version.py` from build directory
|
||||
version_filepath = os.path.join(
|
||||
os.environ["OPENPYPE_ROOT"],
|
||||
"openpype",
|
||||
"version.py"
|
||||
)
|
||||
if not os.path.exists(version_filepath):
|
||||
return None
|
||||
|
||||
module = import_filepath(version_filepath, "openpype_build_version")
|
||||
return getattr(module, "__version__", None)
|
||||
|
||||
|
||||
def is_running_from_build():
|
||||
"""Determine if current process is running from build or code.
|
||||
|
||||
Returns:
|
||||
bool: True if running from build.
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
executable_path = os.environ["AYON_EXECUTABLE"]
|
||||
else:
|
||||
executable_path = os.environ["OPENPYPE_EXECUTABLE"]
|
||||
executable_filename = os.path.basename(executable_path)
|
||||
if "python" in executable_filename.lower():
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def is_staging_enabled():
|
||||
if AYON_SERVER_ENABLED:
|
||||
return os.getenv("AYON_USE_STAGING") == "1"
|
||||
return os.environ.get("OPENPYPE_USE_STAGING") == "1"
|
||||
|
||||
|
||||
def is_running_staging():
|
||||
"""Currently used OpenPype is staging version.
|
||||
|
||||
This function is not 100% proper check of staging version. It is possible
|
||||
to have enabled to use staging version but be in different one.
|
||||
|
||||
The function is based on 4 factors:
|
||||
- env 'OPENPYPE_IS_STAGING' is set
|
||||
- current production version
|
||||
- current staging version
|
||||
- use staging is enabled
|
||||
|
||||
First checks for 'OPENPYPE_IS_STAGING' environment which can be set to '1'.
|
||||
The value should be set only when a process without access to
|
||||
OpenPypeVersion is launched (e.g. in DCCs). If current version is same
|
||||
as production version it is expected that it is not staging, and it
|
||||
doesn't matter what would 'is_staging_enabled' return. If current version
|
||||
is same as staging version it is expected we're in staging. In all other
|
||||
cases 'is_staging_enabled' is used as source of outpu value.
|
||||
|
||||
The function is used to decide which icon is used. To check e.g. updates
|
||||
the output should be combined with other functions from this file.
|
||||
|
||||
Returns:
|
||||
bool: Using staging version or not.
|
||||
"""
|
||||
|
||||
if AYON_SERVER_ENABLED:
|
||||
return is_staging_enabled()
|
||||
|
||||
if os.environ.get("OPENPYPE_IS_STAGING") == "1":
|
||||
return True
|
||||
|
||||
if not op_version_control_available():
|
||||
return False
|
||||
|
||||
from openpype.settings import get_global_settings
|
||||
|
||||
global_settings = get_global_settings()
|
||||
production_version = global_settings["production_version"]
|
||||
latest_version = None
|
||||
if not production_version or production_version == "latest":
|
||||
latest_version = get_latest_version(local=False, remote=True)
|
||||
production_version = latest_version
|
||||
|
||||
current_version = get_openpype_version()
|
||||
if current_version == production_version:
|
||||
return False
|
||||
|
||||
staging_version = global_settings["staging_version"]
|
||||
if not staging_version or staging_version == "latest":
|
||||
if latest_version is None:
|
||||
latest_version = get_latest_version(local=False, remote=True)
|
||||
staging_version = latest_version
|
||||
|
||||
if current_version == staging_version:
|
||||
return True
|
||||
|
||||
return is_staging_enabled()
|
||||
|
||||
|
||||
# ----------------------------------------
|
||||
# Functions dependent on OpenPypeVersion
|
||||
# - Make sense to call only in OpenPype process
|
||||
# ----------------------------------------
|
||||
def get_OpenPypeVersion():
|
||||
"""Access to OpenPypeVersion class stored in sys modules."""
|
||||
return sys.modules.get("OpenPypeVersion")
|
||||
|
||||
|
||||
def op_version_control_available():
|
||||
"""Check if current process has access to OpenPypeVersion."""
|
||||
if get_OpenPypeVersion() is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def get_installed_version():
|
||||
"""Get OpenPype version inside build.
|
||||
|
||||
This version is not returned by any other functions here.
|
||||
"""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().get_installed_version()
|
||||
return None
|
||||
|
||||
|
||||
def get_available_versions(*args, **kwargs):
|
||||
"""Get list of available versions."""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().get_available_versions(
|
||||
*args, **kwargs
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def openpype_path_is_set():
|
||||
"""OpenPype repository path is set in settings."""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().openpype_path_is_set()
|
||||
return None
|
||||
|
||||
|
||||
def openpype_path_is_accessible():
|
||||
"""OpenPype version repository path can be accessed."""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().openpype_path_is_accessible()
|
||||
return None
|
||||
|
||||
|
||||
def get_local_versions(*args, **kwargs):
|
||||
"""OpenPype versions available on this workstation."""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().get_local_versions(*args, **kwargs)
|
||||
return None
|
||||
|
||||
|
||||
def get_remote_versions(*args, **kwargs):
|
||||
"""OpenPype versions in repository path."""
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().get_remote_versions(*args, **kwargs)
|
||||
return None
|
||||
|
||||
|
||||
def get_latest_version(local=None, remote=None):
|
||||
"""Get latest version from repository path."""
|
||||
|
||||
if op_version_control_available():
|
||||
return get_OpenPypeVersion().get_latest_version(
|
||||
local=local,
|
||||
remote=remote
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def get_expected_studio_version(staging=None):
|
||||
"""Expected production or staging version in studio."""
|
||||
if op_version_control_available():
|
||||
if staging is None:
|
||||
staging = is_staging_enabled()
|
||||
return get_OpenPypeVersion().get_expected_studio_version(staging)
|
||||
return None
|
||||
|
||||
|
||||
def get_expected_version(staging=None):
|
||||
expected_version = get_expected_studio_version(staging)
|
||||
if expected_version is None:
|
||||
# Look for latest if expected version is not set in settings
|
||||
expected_version = get_latest_version(
|
||||
local=False,
|
||||
remote=True
|
||||
)
|
||||
return expected_version
|
||||
|
||||
|
||||
def is_current_version_studio_latest():
|
||||
"""Is currently running OpenPype version which is defined by studio.
|
||||
|
||||
It is not recommended to ask in each process as there may be situations
|
||||
when older OpenPype should be used. For example on farm. But it does make
|
||||
sense in processes that can run for a long time.
|
||||
|
||||
Returns:
|
||||
None: Can't determine. e.g. when running from code or the build is
|
||||
too old.
|
||||
bool: True when is using studio
|
||||
"""
|
||||
output = None
|
||||
# Skip if is not running from build or build does not support version
|
||||
# control or path to folder with zip files is not accessible
|
||||
if (
|
||||
not is_running_from_build()
|
||||
or not op_version_control_available()
|
||||
or not openpype_path_is_accessible()
|
||||
):
|
||||
return output
|
||||
|
||||
# Get OpenPypeVersion class
|
||||
OpenPypeVersion = get_OpenPypeVersion()
|
||||
# Convert current version to OpenPypeVersion object
|
||||
current_version = OpenPypeVersion(version=get_openpype_version())
|
||||
|
||||
# Get expected version (from settings)
|
||||
expected_version = get_expected_version()
|
||||
# Check if current version is expected version
|
||||
return current_version == expected_version
|
||||
|
||||
|
||||
def is_current_version_higher_than_expected():
|
||||
"""Is current OpenPype version higher than version defined by studio.
|
||||
|
||||
Returns:
|
||||
None: Can't determine. e.g. when running from code or the build is
|
||||
too old.
|
||||
bool: True when is higher than studio version.
|
||||
"""
|
||||
output = None
|
||||
# Skip if is not running from build or build does not support version
|
||||
# control or path to folder with zip files is not accessible
|
||||
if (
|
||||
not is_running_from_build()
|
||||
or not op_version_control_available()
|
||||
or not openpype_path_is_accessible()
|
||||
):
|
||||
return output
|
||||
|
||||
# Get OpenPypeVersion class
|
||||
OpenPypeVersion = get_OpenPypeVersion()
|
||||
# Convert current version to OpenPypeVersion object
|
||||
current_version = OpenPypeVersion(version=get_openpype_version())
|
||||
|
||||
# Get expected version (from settings)
|
||||
expected_version = get_expected_version()
|
||||
# Check if current version is expected version
|
||||
return current_version > expected_version
|
||||
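
# A usage sketch (not part of the module) for long-running OpenPype
# processes, e.g. the tray; the direct module import path is an assumption:
#
#     from openpype.lib.openpype_version import (
#         is_current_version_studio_latest,
#         get_expected_version,
#     )
#
#     if is_current_version_studio_latest() is False:
#         print("Restart to get version {}".format(get_expected_version()))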
842
client/ayon_core/lib/path_templates.py
Normal file
@@ -0,0 +1,842 @@
import os
import re
import copy
import numbers
import collections

import six

KEY_PATTERN = re.compile(r"(\{.*?[^{0]*\})")
KEY_PADDING_PATTERN = re.compile(r"([^:]+)\S+[><]\S+")
SUB_DICT_PATTERN = re.compile(r"([^\[\]]+)")
OPTIONAL_PATTERN = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")


def merge_dict(main_dict, enhance_dict):
    """Merge dictionaries by keys.

    The function calls itself if a value under a key is again a dictionary.

    Args:
        main_dict (dict): First dict to merge the second one into.
        enhance_dict (dict): Second dict to be merged.

    Returns:
        dict: Merged result.

    .. note:: Does not override the whole value on the first found key
       but only the values that differ in enhance_dict.

    """
    for key, value in enhance_dict.items():
        if key not in main_dict:
            main_dict[key] = value
        elif isinstance(value, dict) and isinstance(main_dict[key], dict):
            main_dict[key] = merge_dict(main_dict[key], value)
        else:
            main_dict[key] = value
    return main_dict
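
# A doctest-style illustration (not part of the module) of the recursive
# merge behaviour described above:
#
#     >>> merge_dict({"a": {"x": 1}}, {"a": {"y": 2}, "b": 3})
#     {'a': {'x': 1, 'y': 2}, 'b': 3}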
class TemplateMissingKey(Exception):
    """Exception for cases when a key does not exist in the template."""

    msg = "Template key does not exist: `{}`."

    def __init__(self, parents):
        parent_join = "".join(["[\"{0}\"]".format(key) for key in parents])
        super(TemplateMissingKey, self).__init__(
            self.msg.format(parent_join)
        )


class TemplateUnsolved(Exception):
    """Exception for an unsolved template when strict is set to True."""

    msg = "Template \"{0}\" is unsolved.{1}{2}"
    invalid_types_msg = " Keys with invalid DataType: `{0}`."
    missing_keys_msg = " Missing keys: \"{0}\"."

    def __init__(self, template, missing_keys, invalid_types):
        invalid_type_items = []
        for _key, _type in invalid_types.items():
            invalid_type_items.append(
                "\"{0}\" {1}".format(_key, str(_type))
            )

        invalid_types_msg = ""
        if invalid_type_items:
            invalid_types_msg = self.invalid_types_msg.format(
                ", ".join(invalid_type_items)
            )

        missing_keys_msg = ""
        if missing_keys:
            missing_keys_msg = self.missing_keys_msg.format(
                ", ".join(missing_keys)
            )
        super(TemplateUnsolved, self).__init__(
            self.msg.format(template, missing_keys_msg, invalid_types_msg)
        )


class StringTemplate(object):
    """String that can be formatted."""
    def __init__(self, template):
        if not isinstance(template, six.string_types):
            raise TypeError("<{}> argument must be a string, not {}.".format(
                self.__class__.__name__, str(type(template))
            ))

        self._template = template
        parts = []
        last_end_idx = 0
        for item in KEY_PATTERN.finditer(template):
            start, end = item.span()
            if start > last_end_idx:
                parts.append(template[last_end_idx:start])
            parts.append(FormattingPart(template[start:end]))
            last_end_idx = end

        if last_end_idx < len(template):
            parts.append(template[last_end_idx:len(template)])

        new_parts = []
        for part in parts:
            if not isinstance(part, six.string_types):
                new_parts.append(part)
                continue

            substr = ""
            for char in part:
                if char not in ("<", ">"):
                    substr += char
                else:
                    if substr:
                        new_parts.append(substr)
                    new_parts.append(char)
                    substr = ""
            if substr:
                new_parts.append(substr)

        self._parts = self.find_optional_parts(new_parts)

    def __str__(self):
        return self.template

    def __repr__(self):
        return "<{}> {}".format(self.__class__.__name__, self.template)

    def __contains__(self, other):
        return other in self.template

    def replace(self, *args, **kwargs):
        self._template = self.template.replace(*args, **kwargs)
        return self

    @property
    def template(self):
        return self._template

    def format(self, data):
        """Figure out the whole formatting.

        Separate advanced keys (like '{project[name]}') from the string,
        which must be formatted separately in case of missing or incomplete
        keys in data.

        Args:
            data (dict): Contains keys to be filled into the template.

        Returns:
            TemplateResult: Filled or partially filled template containing
                all data needed or missing for filling the template.
        """
        result = TemplatePartResult()
        for part in self._parts:
            if isinstance(part, six.string_types):
                result.add_output(part)
            else:
                part.format(data, result)

        invalid_types = result.invalid_types
        invalid_types.update(result.invalid_optional_types)
        invalid_types = result.split_keys_to_subdicts(invalid_types)

        missing_keys = result.missing_keys
        missing_keys |= result.missing_optional_keys

        solved = result.solved
        used_values = result.get_clean_used_values()

        return TemplateResult(
            result.output,
            self.template,
            solved,
            used_values,
            missing_keys,
            invalid_types
        )

    def format_strict(self, *args, **kwargs):
        result = self.format(*args, **kwargs)
        result.validate()
        return result

    @classmethod
    def format_template(cls, template, data):
        objected_template = cls(template)
        return objected_template.format(data)

    @classmethod
    def format_strict_template(cls, template, data):
        objected_template = cls(template)
        return objected_template.format_strict(data)

    @staticmethod
    def find_optional_parts(parts):
        new_parts = []
        tmp_parts = {}
        counted_symb = -1
        for part in parts:
            if part == "<":
                counted_symb += 1
                tmp_parts[counted_symb] = []

            elif part == ">":
                if counted_symb > -1:
                    parts = tmp_parts.pop(counted_symb)
                    counted_symb -= 1
                    # If the part contains only a single string, keep the
                    # value unchanged
                    if parts:
                        # Remove the optional start char
                        parts.pop(0)

                    if not parts:
                        value = "<>"
                    elif (
                        len(parts) == 1
                        and isinstance(parts[0], six.string_types)
                    ):
                        value = "<{}>".format(parts[0])
                    else:
                        value = OptionalPart(parts)

                    if counted_symb < 0:
                        out_parts = new_parts
                    else:
                        out_parts = tmp_parts[counted_symb]
                    # Store value
                    out_parts.append(value)
                    continue

            if counted_symb < 0:
                new_parts.append(part)
            else:
                tmp_parts[counted_symb].append(part)

        if tmp_parts:
            for idx in sorted(tmp_parts.keys()):
                new_parts.extend(tmp_parts[idx])
        return new_parts
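
# A doctest-style sketch (not part of the module): a template with a nested
# key and an optional suffix part in angle brackets.
#
#     >>> template = StringTemplate("{project[name]}/{asset}<_{variant}>")
#     >>> str(template.format({"project": {"name": "prj"}, "asset": "sh010"}))
#     'prj/sh010'
#     >>> str(template.format({
#     ...     "project": {"name": "prj"}, "asset": "sh010", "variant": "main"
#     ... }))
#     'prj/sh010_main'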
class TemplatesDict(object):
    def __init__(self, templates=None):
        self._raw_templates = None
        self._templates = None
        self._objected_templates = None
        self.set_templates(templates)

    def set_templates(self, templates):
        if templates is None:
            self._raw_templates = None
            self._templates = None
            self._objected_templates = None
        elif isinstance(templates, dict):
            self._raw_templates = copy.deepcopy(templates)
            self._templates = templates
            self._objected_templates = self.create_objected_templates(
                templates)
        else:
            raise TypeError("<{}> argument must be a dict, not {}.".format(
                self.__class__.__name__, str(type(templates))
            ))

    def __getitem__(self, key):
        return self.objected_templates[key]

    def get(self, key, *args, **kwargs):
        return self.objected_templates.get(key, *args, **kwargs)

    @property
    def raw_templates(self):
        return self._raw_templates

    @property
    def templates(self):
        return self._templates

    @property
    def objected_templates(self):
        return self._objected_templates

    def _create_template_object(self, template):
        """Create a template object from a template string.

        Separated into a method to give the option to change the class of
        templates.

        Args:
            template (str): Template string.

        Returns:
            StringTemplate: Object of the template.
        """

        return StringTemplate(template)

    def create_objected_templates(self, templates):
        if not isinstance(templates, dict):
            raise TypeError("Expected dict object, got {}".format(
                str(type(templates))
            ))

        objected_templates = copy.deepcopy(templates)
        inner_queue = collections.deque()
        inner_queue.append(objected_templates)
        while inner_queue:
            item = inner_queue.popleft()
            if not isinstance(item, dict):
                continue
            for key in tuple(item.keys()):
                value = item[key]
                if isinstance(value, six.string_types):
                    item[key] = self._create_template_object(value)
                elif isinstance(value, dict):
                    inner_queue.append(value)
        return objected_templates

    def _format_value(self, value, data):
        if isinstance(value, StringTemplate):
            return value.format(data)

        if isinstance(value, dict):
            return self._solve_dict(value, data)
        return value

    def _solve_dict(self, templates, data):
        """Solve templates with the entered data.

        Args:
            templates (dict): All templates which will be formatted.
            data (dict): Contains keys to be filled into the templates.

        Returns:
            dict: With `TemplateResult` in values containing filled or
                partially filled templates.
        """
        output = collections.defaultdict(dict)
        for key, value in templates.items():
            output[key] = self._format_value(value, data)

        return output

    def format(self, in_data, only_keys=True, strict=True):
        """Solve templates based on the entered data.

        Args:
            in_data (dict): Contains keys to be filled into the templates.
            only_keys (bool, optional): Decides if environ will be used to
                fill templates or only keys in data.
            strict (bool, optional): Decides if accessing unsolved templates
                in the output raises an exception.

        Returns:
            TemplatesResultDict: Output `TemplateResult` objects have the
                `strict` attribute set to True, so accessing unfilled keys
                in the templates will raise exceptions with an explained
                error.
        """
        # Create a copy of the inserted data
        data = copy.deepcopy(in_data)

        # Add environment variables to data
        if only_keys is False:
            for key, val in os.environ.items():
                env_key = "$" + key
                if env_key not in data:
                    data[env_key] = val

        solved = self._solve_dict(self.objected_templates, data)

        output = TemplatesResultDict(solved)
        output.strict = strict
        return output
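
# A doctest-style sketch (not part of the module) of formatting a nested
# template mapping, including padding of the version key:
#
#     >>> templates = TemplatesDict(
#     ...     {"work": {"file": "{asset}_v{version:0>3}"}})
#     >>> result = templates.format({"asset": "sh010", "version": 4})
#     >>> str(result["work"]["file"])
#     'sh010_v004'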
class TemplateResult(str):
    """Result of template formatting with most of the information in it.

    Args:
        used_values (dict): Dictionary of template filling data, with only
            the used keys.
        solved (bool): For checking if all required keys were filled.
        template (str): Original template.
        missing_keys (list): Missing keys that were not in the data.
            Includes missing optional keys.
        invalid_types (dict): When a key was found in data but the value had
            a disallowed DataType. Allowed data types are `numbers`,
            `str` (`basestring`) and `dict`. A dictionary may cause an
            invalid type when the value of a key in data is a dictionary
            but the template expects a string or number.
    """

    used_values = None
    solved = None
    template = None
    missing_keys = None
    invalid_types = None

    def __new__(
        cls, filled_template, template, solved,
        used_values, missing_keys, invalid_types
    ):
        new_obj = super(TemplateResult, cls).__new__(cls, filled_template)
        new_obj.used_values = used_values
        new_obj.solved = solved
        new_obj.template = template
        new_obj.missing_keys = list(set(missing_keys))
        new_obj.invalid_types = invalid_types
        return new_obj

    def __copy__(self, *args, **kwargs):
        return self.copy()

    def __deepcopy__(self, *args, **kwargs):
        return self.copy()

    def validate(self):
        if not self.solved:
            raise TemplateUnsolved(
                self.template,
                self.missing_keys,
                self.invalid_types
            )

    def copy(self):
        cls = self.__class__
        return cls(
            str(self),
            self.template,
            self.solved,
            self.used_values,
            self.missing_keys,
            self.invalid_types
        )

    def normalized(self):
        """Convert to a normalized path."""

        cls = self.__class__
        return cls(
            os.path.normpath(self.replace("\\", "/")),
            self.template,
            self.solved,
            self.used_values,
            self.missing_keys,
            self.invalid_types
        )


class TemplatesResultDict(dict):
    """Holds and wraps TemplateResults for easy bug reporting."""

    def __init__(self, in_data, key=None, parent=None, strict=None):
        super(TemplatesResultDict, self).__init__()
        for _key, _value in in_data.items():
            if isinstance(_value, dict):
                _value = self.__class__(_value, _key, self)
            self[_key] = _value

        self.key = key
        self.parent = parent
        self.strict = strict
        if self.parent is None and strict is None:
            self.strict = True

    def __getitem__(self, key):
        if key not in self.keys():
            hier = self.hierarchy()
            hier.append(key)
            raise TemplateMissingKey(hier)

        value = super(TemplatesResultDict, self).__getitem__(key)
        if isinstance(value, self.__class__):
            return value

        # Raise an exception when solved templates are expected but the
        # value is not solved.
        if self.raise_on_unsolved and hasattr(value, "validate"):
            value.validate()
        return value

    @property
    def raise_on_unsolved(self):
        """To affect this, change the `strict` attribute."""
        if self.strict is not None:
            return self.strict
        return self.parent.raise_on_unsolved

    def hierarchy(self):
        """Return dictionary keys one by one up to the root parent."""
        if self.parent is None:
            return []

        hier_keys = []
        par_hier = self.parent.hierarchy()
        if par_hier:
            hier_keys.extend(par_hier)
        hier_keys.append(self.key)

        return hier_keys

    @property
    def missing_keys(self):
        """Return missing keys of all children templates."""
        missing_keys = set()
        for value in self.values():
            missing_keys |= set(value.missing_keys)
        return missing_keys

    @property
    def invalid_types(self):
        """Return invalid types of all children templates."""
        invalid_types = {}
        for value in self.values():
            invalid_types = merge_dict(invalid_types, value.invalid_types)
        return invalid_types

    @property
    def used_values(self):
        """Return used values of all children templates."""
        used_values = {}
        for value in self.values():
            used_values = merge_dict(used_values, value.used_values)
        return used_values

    def get_solved(self):
        """Get only solved keys from the templates."""
        result = {}
        for key, value in self.items():
            if isinstance(value, self.__class__):
                value = value.get_solved()
                if not value:
                    continue
                result[key] = value

            elif (
                not hasattr(value, "solved") or
                value.solved
            ):
                result[key] = value
        return self.__class__(result, key=self.key, parent=self.parent)
class TemplatePartResult:
    """Result to store the outcome of template parts."""
    def __init__(self, optional=False):
        # Missing keys or invalid value types of required keys
        self._missing_keys = set()
        self._invalid_types = {}
        # Missing keys or invalid value types of optional keys
        self._missing_optional_keys = set()
        self._invalid_optional_types = {}

        # Used values stored by key with origin type
        # - key without any padding or key modifiers
        # - value from filling data
        # Example: {"version": 1}
        self._used_values = {}
        # Used values stored by key with all modifiers
        # - value is already a formatted string
        # Example: {"version:0>3": "001"}
        self._realy_used_values = {}
        # Concatenated string output after formatting
        self._output = ""
        # Is this result from an optional part
        self._optional = optional

    def add_output(self, other):
        if isinstance(other, six.string_types):
            self._output += other

        elif isinstance(other, TemplatePartResult):
            self._output += other.output

            self._missing_keys |= other.missing_keys
            self._missing_optional_keys |= other.missing_optional_keys

            self._invalid_types.update(other.invalid_types)
            self._invalid_optional_types.update(other.invalid_optional_types)

            if other.optional and not other.solved:
                return
            self._used_values.update(other.used_values)
            self._realy_used_values.update(other.realy_used_values)

        else:
            raise TypeError("Cannot add data from \"{}\" to \"{}\"".format(
                str(type(other)), self.__class__.__name__)
            )

    @property
    def solved(self):
        if self.optional:
            if (
                len(self.missing_optional_keys) > 0
                or len(self.invalid_optional_types) > 0
            ):
                return False
        return (
            len(self.missing_keys) == 0
            and len(self.invalid_types) == 0
        )

    @property
    def optional(self):
        return self._optional

    @property
    def output(self):
        return self._output

    @property
    def missing_keys(self):
        return self._missing_keys

    @property
    def missing_optional_keys(self):
        return self._missing_optional_keys

    @property
    def invalid_types(self):
        return self._invalid_types

    @property
    def invalid_optional_types(self):
        return self._invalid_optional_types

    @property
    def realy_used_values(self):
        return self._realy_used_values

    @property
    def used_values(self):
        return self._used_values

    @staticmethod
    def split_keys_to_subdicts(values):
        output = {}
        for key, value in values.items():
            key_padding = list(KEY_PADDING_PATTERN.findall(key))
            if key_padding:
                key = key_padding[0]
            key_subdict = list(SUB_DICT_PATTERN.findall(key))
            data = output
            last_key = key_subdict.pop(-1)
            for subkey in key_subdict:
                if subkey not in data:
                    data[subkey] = {}
                data = data[subkey]
            data[last_key] = value
        return output

    def get_clean_used_values(self):
        new_used_values = {}
        for key, value in self.used_values.items():
            if isinstance(value, FormatObject):
                value = str(value)
            new_used_values[key] = value

        return self.split_keys_to_subdicts(new_used_values)

    def add_realy_used_value(self, key, value):
        self._realy_used_values[key] = value

    def add_used_value(self, key, value):
        self._used_values[key] = value

    def add_missing_key(self, key):
        if self._optional:
            self._missing_optional_keys.add(key)
        else:
            self._missing_keys.add(key)

    def add_invalid_type(self, key, value):
        if self._optional:
            self._invalid_optional_types[key] = type(value)
        else:
            self._invalid_types[key] = type(value)
class FormatObject(object):
    """Object that can be used for formatting.

    This is a base class for values that can be used in 'StringTemplate'
    formatting.
    """
    def __init__(self):
        self.value = ""

    def __format__(self, *args, **kwargs):
        return self.value.__format__(*args, **kwargs)

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return self.__str__()
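
# A minimal subclass sketch (not part of the module): a 'FormatObject'
# carries a string 'value' that is used when the object is formatted.
#
#     >>> class VersionObj(FormatObject):
#     ...     def __init__(self, version):
#     ...         super(VersionObj, self).__init__()
#     ...         self.value = "v{:0>3}".format(version)
#     >>> "{}".format(VersionObj(7))
#     'v007'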
class FormattingPart:
    """String with a formatting template.

    Contains only a single key to format, e.g. "{project[name]}".

    Args:
        template (str): String containing the formatting key.
    """
    def __init__(self, template):
        self._template = template

    @property
    def template(self):
        return self._template

    def __repr__(self):
        return "<Format:{}>".format(self._template)

    def __str__(self):
        return self._template

    @staticmethod
    def validate_value_type(value):
        """Check if a value can be used for formatting of a single key."""
        if isinstance(value, (numbers.Number, FormatObject)):
            return True

        for inh_class in type(value).mro():
            if inh_class in six.string_types:
                return True
        return False

    def format(self, data, result):
        """Format the formatting string.

        Args:
            data (dict): Data that should be used for formatting.
            result (TemplatePartResult): Object where the result is stored.
        """
        key = self.template[1:-1]
        if key in result.realy_used_values:
            result.add_output(result.realy_used_values[key])
            return result

        # Check if the key expects subdictionary keys (e.g. project[name])
        existence_check = key
        key_padding = list(KEY_PADDING_PATTERN.findall(existence_check))
        if key_padding:
            existence_check = key_padding[0]
        key_subdict = list(SUB_DICT_PATTERN.findall(existence_check))

        value = data
        missing_key = False
        invalid_type = False
        used_keys = []
        for sub_key in key_subdict:
            if (
                value is None
                or (hasattr(value, "items") and sub_key not in value)
            ):
                missing_key = True
                used_keys.append(sub_key)
                break

            if not hasattr(value, "items"):
                invalid_type = True
                break

            used_keys.append(sub_key)
            value = value.get(sub_key)

        if missing_key or invalid_type:
            if len(used_keys) == 0:
                invalid_key = key_subdict[0]
            else:
                invalid_key = used_keys[0]
            for idx, sub_key in enumerate(used_keys):
                if idx == 0:
                    continue
                invalid_key += "[{0}]".format(sub_key)

            if missing_key:
                result.add_missing_key(invalid_key)

            elif invalid_type:
                result.add_invalid_type(invalid_key, value)

            result.add_output(self.template)
            return result

        if self.validate_value_type(value):
            fill_data = {}
            first_value = True
            for used_key in reversed(used_keys):
                if first_value:
                    first_value = False
                    fill_data[used_key] = value
                else:
                    _fill_data = {used_key: fill_data}
                    fill_data = _fill_data

            formatted_value = self.template.format(**fill_data)
            result.add_realy_used_value(key, formatted_value)
            result.add_used_value(existence_check, formatted_value)
            result.add_output(formatted_value)
            return result

        result.add_invalid_type(key, value)
        result.add_output(self.template)

        return result


class OptionalPart:
    """Template part which contains optional formatting strings.

    If this part can't be filled, the result is an empty string.

    Args:
        parts (list): Parts of the template. Can contain 'str',
            'OptionalPart' or 'FormattingPart'.
    """

    def __init__(self, parts):
        self._parts = parts

    @property
    def parts(self):
        return self._parts

    def __str__(self):
        return "<{}>".format("".join([str(p) for p in self._parts]))

    def __repr__(self):
        return "<Optional:{}>".format("".join([str(p) for p in self._parts]))

    def format(self, data, result):
        new_result = TemplatePartResult(True)
        for part in self._parts:
            if isinstance(part, six.string_types):
                new_result.add_output(part)
            else:
                part.format(data, new_result)

        if new_result.solved:
            result.add_output(new_result)
        return result
224
client/ayon_core/lib/path_tools.py
Normal file
@@ -0,0 +1,224 @@
import os
import re
import logging
import platform

import clique

log = logging.getLogger(__name__)


def format_file_size(file_size, suffix=None):
    """Returns formatted string with the size in an appropriate unit.

    Args:
        file_size (int): Size of file in bytes.
        suffix (str): Suffix for formatted size. Default is 'B' (as bytes).

    Returns:
        str: Formatted size using a proper unit and the passed suffix
            (e.g. 7 MiB).
    """

    if suffix is None:
        suffix = "B"

    for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
        if abs(file_size) < 1024.0:
            return "%3.1f%s%s" % (file_size, unit, suffix)
        file_size /= 1024.0
    return "%.1f%s%s" % (file_size, "Yi", suffix)
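
# For instance (doctest-style, not part of the module):
#
#     >>> format_file_size(7 * 1024 * 1024)
#     '7.0MiB'
#     >>> format_file_size(512)
#     '512.0B'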
def create_hard_link(src_path, dst_path):
    """Create a hardlink of a file.

    Args:
        src_path (str): Full path to a file which is used as the source for
            the hardlink.
        dst_path (str): Full path to a file where a link of the source will
            be added.
    """
    # Use `os.link` if available
    # - should be the case on all platforms with newer python versions
    if hasattr(os, "link"):
        os.link(src_path, dst_path)
        return

    # Windows implementation of hardlinks
    # - used in Python 2
    if platform.system().lower() == "windows":
        import ctypes
        from ctypes.wintypes import BOOL
        CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
        CreateHardLink.argtypes = [
            ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_void_p
        ]
        CreateHardLink.restype = BOOL

        res = CreateHardLink(dst_path, src_path, None)
        if res == 0:
            raise ctypes.WinError()
        return
    # Raise a not implemented error if we get here
    raise NotImplementedError(
        "Implementation of hardlink for current environment is missing."
    )


def collect_frames(files):
    """Return a dict of source paths and their frames, if from a sequence.

    Uses clique as the most precise solution; used when the anatomy template
    that created the files is not known.

    The assumption is that frames are separated by '.'; negative frames are
    not allowed.

    Args:
        files (list or set): List of source paths.

    Returns:
        dict: {'/asset/subset_v001.0001.png': '0001', ....}
    """

    patterns = [clique.PATTERNS["frames"]]
    collections, remainder = clique.assemble(
        files, minimum_items=1, patterns=patterns)

    sources_and_frames = {}
    if collections:
        for collection in collections:
            src_head = collection.head
            src_tail = collection.tail

            for index in collection.indexes:
                src_frame = collection.format("{padding}") % index
                src_file_name = "{}{}{}".format(
                    src_head, src_frame, src_tail)
                sources_and_frames[src_file_name] = src_frame
    else:
        sources_and_frames[remainder.pop()] = None

    return sources_and_frames
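
# For instance (doctest-style, not part of the module):
#
#     >>> collect_frames([
#     ...     "/asset/subset_v001.0001.png",
#     ...     "/asset/subset_v001.0002.png",
#     ... ])
#     {'/asset/subset_v001.0001.png': '0001',
#      '/asset/subset_v001.0002.png': '0002'}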
def _rreplace(s, a, b, n=1):
    """Replace a with b in string s from the right side, n times."""
    return b.join(s.rsplit(a, n))


def version_up(filepath):
    """Version up filepath to a new non-existing version.

    Parses for a version identifier like `_v001` or `.v001`.
    When no version is present, `_v001` is appended as a suffix.

    Args:
        filepath (str): Full url.

    Returns:
        str: Filepath with increased version number.

    """
    dirname = os.path.dirname(filepath)
    basename, ext = os.path.splitext(os.path.basename(filepath))

    regex = r"[._]v\d+"
    matches = re.findall(regex, str(basename), re.IGNORECASE)
    if not matches:
        log.info("Creating version...")
        new_label = "_v{version:03d}".format(version=1)
        new_basename = "{}{}".format(basename, new_label)
    else:
        label = matches[-1]
        version = re.search(r"\d+", label).group()
        padding = len(version)

        new_version = int(version) + 1
        new_version = '{version:0{padding}d}'.format(version=new_version,
                                                     padding=padding)
        new_label = label.replace(version, new_version, 1)
        new_basename = _rreplace(basename, label, new_label)
    new_filename = "{}{}".format(new_basename, ext)
    new_filename = os.path.join(dirname, new_filename)
    new_filename = os.path.normpath(new_filename)

    if new_filename == filepath:
        raise RuntimeError("Created path is the same as the current file,"
                           " this is a bug")

    # We check for version clashes against the current file for any file
    # that matches completely in name up to the {version} label found. Thus
    # if the source file was test_v001_test.txt we want to also check
    # clashes against test_v002.txt but do want to preserve the part after
    # the version label for our new filename.
    clash_basename = new_basename
    if not clash_basename.endswith(new_label):
        index = (clash_basename.find(new_label))
        index += len(new_label)
        clash_basename = clash_basename[:index]

    for file in os.listdir(dirname):
        if file.endswith(ext) and file.startswith(clash_basename):
            log.info("Skipping existing version %s" % new_label)
            return version_up(new_filename)

    log.info("New version %s" % new_label)
    return new_filename


def get_version_from_path(file):
    """Find the version number in a file path string.

    Args:
        file (str): File path.

    Returns:
        str: Version number as a string ('001').
    """

    pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE)
    try:
        return pattern.findall(file)[-1]
    except IndexError:
        log.error(
            "templates:get_version_from_workfile:"
            "`{}` missing version string."
            "Example `v004`".format(file)
        )


def get_last_version_from_path(path_dir, filter):
    """Find the last version of the given directory content.

    Args:
        path_dir (str): Directory path.
        filter (list): List of strings used as file name filter.

    Returns:
        str: File name with the last version.

    Example:
        last_version_file = get_last_version_from_path(
            "/project/shots/shot01/work", ["shot01", "compositing", "nk"])
    """

    assert os.path.isdir(path_dir), "`path_dir` argument needs to be directory"
    assert isinstance(filter, list) and (
        len(filter) != 0), "`filter` argument needs to be list and not empty"

    filtred_files = list()

    # Form regex for filtering
    pattern = r".*".join(filter)

    for file in os.listdir(path_dir):
        if not re.findall(pattern, file):
            continue
        filtred_files.append(file)

    if filtred_files:
        filtred_files.sort()
        return filtred_files[-1]

    return None
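
# For instance (doctest-style, not part of the module; assumes the directory
# exists and holds no higher version, and the output is normalized for the
# current platform's path separators):
#
#     >>> version_up("/project/work/shot010_v001.ma")
#     '/project/work/shot010_v002.ma'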
72
client/ayon_core/lib/plugin_tools.py
Normal file
@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-
"""Avalon/Pyblish plugin tools."""
import os
import logging
import re

log = logging.getLogger(__name__)


def prepare_template_data(fill_pairs):
    """Prepare formatted data for filling a template.

    It produces multiple variants of keys (key, Key, KEY) to control the
    format of the filled template.

    Args:
        fill_pairs (iterable): Iterable of (key, value) tuples.

    Returns:
        dict: E.g. ('host', 'maya') becomes
            {'host': 'maya', 'Host': 'Maya', 'HOST': 'MAYA'}

    """
    fill_data = {}
    regex = re.compile(r"[a-zA-Z0-9]")
    for key, value in dict(fill_pairs).items():
        # Handle cases when value is `None` (standalone publisher)
        if value is None:
            continue
        # Keep the value as it is
        fill_data[key] = value
        # Both key and value are upper case
        fill_data[key.upper()] = value.upper()

        # Capitalize only the first char of the value
        # - conditions are because of possible index errors
        # - regex is to skip symbols that are not chars or numbers
        #   - e.g. "{key}" which starts with a curly bracket
        capitalized = ""
        for idx in range(len(value or "")):
            char = value[idx]
            if not regex.match(char):
                capitalized += char
            else:
                capitalized += char.upper()
                capitalized += value[idx + 1:]
                break

        fill_data[key.capitalize()] = capitalized

    return fill_data


def source_hash(filepath, *args):
    """Generate a simple identifier for a source file.

    This is used to identify whether a source file has previously been
    processed into the pipeline, e.g. a texture.

    The hash is based on the source filepath, modification time and file
    size. It is only used to identify whether a specific source file was
    already published before from the same location with the same
    modification date. We opt to do it this way as opposed to an Avalanche
    C4 hash as this is much faster and predictable enough for all our
    production use cases.

    Args:
        filepath (str): The source file path.

    You can specify additional arguments in the function to allow for
    specific 'processing' values to be included.
    """
    # We replace dots with commas because `.` cannot be a key in a pymongo
    # dict.
    file_name = os.path.basename(filepath)
    time = str(os.path.getmtime(filepath))
    size = str(os.path.getsize(filepath))
    return "|".join([file_name, time, size] + list(args)).replace(".", ",")
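
# An illustration (not part of the module; the mtime and size values are
# hypothetical):
#
#     >>> source_hash("/textures/wood.png")  # mtime 1700000000.0, size 2048
#     'wood,png|1700000000,0|2048'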
227
client/ayon_core/lib/profiles_filtering.py
Normal file
@@ -0,0 +1,227 @@
import re
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def compile_list_of_regexes(in_list):
|
||||
"""Convert strings in entered list to compiled regex objects."""
|
||||
regexes = list()
|
||||
if not in_list:
|
||||
return regexes
|
||||
|
||||
for item in in_list:
|
||||
if not item:
|
||||
continue
|
||||
try:
|
||||
regexes.append(re.compile(item))
|
||||
except TypeError:
|
||||
print((
|
||||
"Invalid type \"{}\" value \"{}\"."
|
||||
" Expected string based object. Skipping."
|
||||
).format(str(type(item)), str(item)))
|
||||
return regexes
|
||||
|
||||
|
||||
def _profile_exclusion(matching_profiles, logger):
|
||||
"""Find out most matching profile byt host, task and family match.
|
||||
|
||||
Profiles are selectively filtered. Each item in passed argument must
|
||||
contain tuple of (profile, profile's score) where score is list of
|
||||
booleans. Each boolean represents existence of filter for specific key.
|
||||
Profiles are looped in sequence. In each sequence are profiles split into
|
||||
true_list and false_list. For next sequence loop are used profiles in
|
||||
true_list if there are any profiles else false_list is used.
|
||||
|
||||
Filtering ends when only one profile left in true_list. Or when all
|
||||
existence booleans loops passed, in that case first profile from remainded
|
||||
profiles is returned.
|
||||
|
||||
Args:
|
||||
matching_profiles (list): Profiles with same scores. Each item is tuple
|
||||
with (profile, profile values)
|
||||
|
||||
Returns:
|
||||
dict: Most matching profile.
|
||||
"""
|
||||
if not matching_profiles:
|
||||
return None
|
||||
|
||||
if len(matching_profiles) == 1:
|
||||
return matching_profiles[0][0]
|
||||
|
||||
scores_len = len(matching_profiles[0][1])
|
||||
for idx in range(scores_len):
|
||||
profiles_true = []
|
||||
profiles_false = []
|
||||
for profile, score in matching_profiles:
|
||||
if score[idx]:
|
||||
profiles_true.append((profile, score))
|
||||
else:
|
||||
profiles_false.append((profile, score))
|
||||
|
||||
if profiles_true:
|
||||
matching_profiles = profiles_true
|
||||
else:
|
||||
matching_profiles = profiles_false
|
||||
|
||||
if len(matching_profiles) == 1:
|
||||
return matching_profiles[0][0]
|
||||
|
||||
return matching_profiles[0][0]
|
||||
|
||||
|
||||
def fullmatch(regex, string, flags=0):
|
||||
"""Emulate python-3.4 re.fullmatch()."""
|
||||
matched = re.match(regex, string, flags=flags)
|
||||
if matched and matched.span()[1] == len(string):
|
||||
return matched
|
||||
return None
|
||||
|
||||
|
||||
def validate_value_by_regexes(value, in_list):
    """Validate if any regex from the list matches the entered value.

    Args:
        value (str): String checked against the regexes.
        in_list (list): List with regexes.

    Returns:
        int: Returns `0` when list is not set, is empty or contains "*".
            Returns `1` when any regex matches the value and returns `-1`
            when none of the regexes match the entered value.
    """
    if not in_list:
        return 0

    if not isinstance(in_list, (list, tuple, set)):
        in_list = [in_list]

    if "*" in in_list:
        return 0

    # If value is not set and the list has specific values then resolve value
    # as not matching.
    if not value:
        return -1

    regexes = compile_list_of_regexes(in_list)
    for regex in regexes:
        if hasattr(regex, "fullmatch"):
            result = regex.fullmatch(value)
        else:
            result = fullmatch(regex, value)
        if result:
            return 1
    return -1

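A short sketch of the tri-state return value described above (example values are made up):

    >>> validate_value_by_regexes("maya", ["maya", "nuke"])
    1
    >>> validate_value_by_regexes("houdini", ["maya", "nuke"])
    -1
    >>> validate_value_by_regexes("houdini", None)
    0
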
def filter_profiles(profiles_data, key_values, keys_order=None, logger=None):
    """Filter profiles by entered key -> values.

    Each profile is marked with a score for each key/value from `key_values`,
    with points -1, 0 or 1:
    - if the profile contains the key and the profile's value contains the
        value from `key_values`, the profile gets 1 point
    - if the profile does not contain the key, or the profile's value is
        empty or contains "*", it gets 0 points
    - if the profile contains the key, the profile's value is not empty and
        does not contain "*", and the value from `key_values` is not
        available in the value, it gets -1 points

    If a profile gets -1 points at any time it is skipped and not used for
    output. The profile with the highest score is returned. If there are
    multiple profiles with the same score, the first in order is used (the
    order of profiles matters).

    Args:
        profiles_data (list): Profile definitions as dictionaries.
        key_values (dict): Mapping of Key <-> Value. Key is checked if it is
            available in the profile and if Value is matching its values.
        keys_order (list, tuple): Order of keys from `key_values` which
            matters only when multiple profiles have the same score.
        logger (logging.Logger): Optionally can be passed different logger.

    Returns:
        dict/None: Return most matching profile or None if none of profiles
            match at least one criteria.
    """
    if not profiles_data:
        return None

    if not logger:
        logger = log

    if not keys_order:
        keys_order = tuple(key_values.keys())
    else:
        _keys_order = list(keys_order)
        # Make sure all keys from `key_values` are passed
        for key in key_values.keys():
            if key not in _keys_order:
                _keys_order.append(key)
        keys_order = tuple(_keys_order)

    log_parts = " | ".join([
        "{}: \"{}\"".format(*item)
        for item in key_values.items()
    ])

    logger.debug(
        "Looking for matching profile for: {}".format(log_parts)
    )

    matching_profiles = None
    highest_profile_points = -1
    # Each profile gets 1 point for each matching filter. The profile with
    # most points is returned. For cases when more than one profile matches,
    # ordered lists of matching values are also stored.
    for profile in profiles_data:
        profile_points = 0
        profile_scores = []

        for key in keys_order:
            value = key_values[key]
            match = validate_value_by_regexes(value, profile.get(key))
            if match == -1:
                profile_value = profile.get(key) or []
                logger.debug(
                    "\"{}\" not found in \"{}\": {}".format(
                        value, key, profile_value
                    )
                )
                profile_points = -1
                break

            profile_points += match
            profile_scores.append(bool(match))

        if (
            profile_points < 0
            or profile_points < highest_profile_points
        ):
            continue

        if profile_points > highest_profile_points:
            matching_profiles = []
            highest_profile_points = profile_points

        if profile_points == highest_profile_points:
            matching_profiles.append((profile, profile_scores))

    if not matching_profiles:
        logger.debug(
            "None of profiles match your setup. {}".format(log_parts)
        )
        return None

    if len(matching_profiles) > 1:
        logger.debug(
            "More than one profile match your setup. {}".format(log_parts)
        )

    profile = _profile_exclusion(matching_profiles, logger)
    if profile:
        logger.debug(
            "Profile selected: {}".format(profile)
        )
    return profile
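A hedged usage sketch of the scoring above (the profile data is made up for illustration; the profile with the explicit task match wins over the "*" wildcard):

    >>> profiles = [
    ...     {"hosts": ["maya"], "task_types": ["*"], "template": "A"},
    ...     {"hosts": ["maya"], "task_types": ["Animation"], "template": "B"},
    ... ]
    >>> profile = filter_profiles(
    ...     profiles, {"hosts": "maya", "task_types": "Animation"}
    ... )
    >>> profile["template"]
    'B'
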
29
client/ayon_core/lib/profiling.py
Normal file

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
"""Provide profiling decorator."""
import os
import cProfile


def do_profile(fn, to_file=None):
    """Wrap a function in a profiler run and print stats after it is done.

    Args:
        fn (callable): Function to wrap.
        to_file (str, optional): If specified, dumps stats into the file
            instead of printing.

    """
    if to_file:
        to_file = to_file.format(pid=os.getpid())

    def profiled(*args, **kwargs):
        profiler = cProfile.Profile()
        try:
            profiler.enable()
            res = fn(*args, **kwargs)
            profiler.disable()
            return res
        finally:
            if to_file:
                profiler.dump_stats(to_file)
            else:
                profiler.print_stats()

    # Return the wrapper so the helper can be used as a decorator; this line
    # was missing in the listing above and the wrapper was never handed back.
    return profiled
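A minimal usage sketch relying on the returned wrapper (the path is hypothetical; '{pid}' in 'to_file' is expanded to the process id):

    def heavy_function():
        return sum(range(10 ** 6))

    profiled = do_profile(heavy_function, to_file="/tmp/stats_{pid}.prof")
    profiled()  # dumps cProfile stats to /tmp/stats_<pid>.prof
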
325
client/ayon_core/lib/project_backpack.py
Normal file

@@ -0,0 +1,325 @@
"""These lib functions are for development purposes.
|
||||
|
||||
WARNING:
|
||||
This is not meant for production data. Please don't write code which is
|
||||
dependent on functionality here.
|
||||
|
||||
Goal is to be able to create package of current state of project with related
|
||||
documents from mongo and files from disk to zip file and then be able
|
||||
to recreate the project based on the zip.
|
||||
|
||||
This gives ability to create project where a changes and tests can be done.
|
||||
|
||||
Keep in mind that to be able to create a package of project has few
|
||||
requirements. Possible requirement should be listed in 'pack_project' function.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import platform
|
||||
import tempfile
|
||||
import shutil
|
||||
import datetime
|
||||
|
||||
import zipfile
|
||||
from openpype.client.mongo import (
|
||||
load_json_file,
|
||||
get_project_connection,
|
||||
replace_project_documents,
|
||||
store_project_documents,
|
||||
)
|
||||
|
||||
DOCUMENTS_FILE_NAME = "database"
|
||||
METADATA_FILE_NAME = "metadata"
|
||||
PROJECT_FILES_DIR = "project_files"
|
||||
|
||||
|
||||
def add_timestamp(filepath):
    """Add timestamp string to a file name."""
    base, ext = os.path.splitext(filepath)
    timestamp = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
    new_base = "{}_{}".format(base, timestamp)
    return new_base + ext

def get_project_document(project_name, database_name=None):
    """Query project document.

    Function 'get_project' from client api cannot be used as it does not
    allow changing which 'database_name' is used.

    Args:
        project_name (str): Name of project.
        database_name (Optional[str]): Name of mongo database where to look
            for the project.

    Returns:
        Union[dict[str, Any], None]: Project document or None.
    """

    col = get_project_connection(project_name, database_name)
    return col.find_one({"type": "project"})

def _pack_files_to_zip(zip_stream, source_path, root_path):
    """Pack files to a zip stream.

    Args:
        zip_stream (zipfile.ZipFile): Stream to a zipfile.
        source_path (str): Path to a directory where files are.
        root_path (str): Path to a directory which is used for calculation
            of relative path.
    """

    for root, _, filenames in os.walk(source_path):
        for filename in filenames:
            filepath = os.path.join(root, filename)
            # TODO add one more folder
            archive_name = os.path.join(
                PROJECT_FILES_DIR,
                os.path.relpath(filepath, root_path)
            )
            zip_stream.write(filepath, archive_name)

def pack_project(
    project_name,
    destination_dir=None,
    only_documents=False,
    database_name=None
):
    """Make a package of a project with mongo documents and files.

    This function has a few restrictions:
    - the project must have only one root
    - all project templates must start with
        "{root[...]}/{project[name]}"

    Args:
        project_name (str): Project that should be packaged.
        destination_dir (Optional[str]): Optional path where zip will be
            stored. Project's root is used if not passed.
        only_documents (Optional[bool]): Pack only Mongo documents and skip
            files.
        database_name (Optional[str]): Custom database name from which the
            project is queried.
    """

    print("Creating package of project \"{}\"".format(project_name))
    # Validate existence of project
    project_doc = get_project_document(project_name, database_name)
    if not project_doc:
        raise ValueError("Project \"{}\" was not found in database".format(
            project_name
        ))

    if only_documents and not destination_dir:
        raise ValueError((
            "Destination directory must be defined"
            " when only documents should be packed."
        ))

    root_path = None
    source_root = {}
    project_source_path = None
    if not only_documents:
        roots = project_doc["config"]["roots"]
        # Determine root directory of project
        source_root = None
        source_root_name = None
        for root_name, root_value in roots.items():
            if source_root is not None:
                raise ValueError(
                    "Packaging is supported only for single root projects"
                )
            source_root = root_value
            source_root_name = root_name

        root_path = source_root[platform.system().lower()]
        print("Using root \"{}\" with path \"{}\"".format(
            source_root_name, root_path
        ))

        project_source_path = os.path.join(root_path, project_name)
        if not os.path.exists(project_source_path):
            raise ValueError("Didn't find source of project files")

    # Determine zip filepath where data will be stored
    if not destination_dir:
        destination_dir = root_path

    if not destination_dir:
        raise ValueError(
            "Project {} does not have any roots.".format(project_name)
        )

    destination_dir = os.path.normpath(destination_dir)
    if not os.path.exists(destination_dir):
        os.makedirs(destination_dir)

    zip_path = os.path.join(destination_dir, project_name + ".zip")

    print("Project will be packaged into \"{}\"".format(zip_path))
    # Rename already existing zip
    if os.path.exists(zip_path):
        dst_filepath = add_timestamp(zip_path)
        os.rename(zip_path, dst_filepath)

    # We can add more data
    metadata = {
        "project_name": project_name,
        "root": source_root,
        "version": 1
    }
    # Create temp json file where metadata are stored
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as s:
        temp_metadata_json = s.name

    with open(temp_metadata_json, "w") as stream:
        json.dump(metadata, stream)

    # Create temp json file where database documents are stored
    with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as s:
        temp_docs_json = s.name

    # Query all project documents and store them to temp json
    store_project_documents(project_name, temp_docs_json, database_name)

    print("Packing files into zip")
    # Write all to zip file
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_stream:
        # Add metadata file
        zip_stream.write(temp_metadata_json, METADATA_FILE_NAME + ".json")
        # Add database documents
        zip_stream.write(temp_docs_json, DOCUMENTS_FILE_NAME + ".json")

        # Add project files to zip
        if not only_documents:
            _pack_files_to_zip(zip_stream, project_source_path, root_path)

    print("Cleaning up")
    # Cleanup
    os.remove(temp_docs_json)
    os.remove(temp_metadata_json)

    print("*** Packing finished ***")

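A hedged usage sketch (the project name and path are hypothetical):

    pack_project("demo_project", destination_dir="/tmp/backups")
    # -> /tmp/backups/demo_project.zip containing 'metadata.json',
    #    'database.json' and the 'project_files/' tree
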
def _unpack_project_files(unzip_dir, root_path, project_name):
    """Move project files from unarchived temp folder to new root.

    Unpack is skipped if source files are not available in the zip. That can
    happen if nothing was published yet or only documents were stored to the
    package.

    Args:
        unzip_dir (str): Location where zip was unzipped.
        root_path (str): Path to new root.
        project_name (str): Name of project.
    """

    src_project_files_dir = os.path.join(
        unzip_dir, PROJECT_FILES_DIR, project_name
    )
    # Skip if files are not in the zip
    if not os.path.exists(src_project_files_dir):
        return

    # Make sure root path exists
    if not os.path.exists(root_path):
        os.makedirs(root_path)

    dst_project_files_dir = os.path.normpath(
        os.path.join(root_path, project_name)
    )
    if os.path.exists(dst_project_files_dir):
        new_path = add_timestamp(dst_project_files_dir)
        print("Project folder already exists. Renamed \"{}\" -> \"{}\"".format(
            dst_project_files_dir, new_path
        ))
        os.rename(dst_project_files_dir, new_path)

    print("Moving project files from temp \"{}\" -> \"{}\"".format(
        src_project_files_dir, dst_project_files_dir
    ))
    shutil.move(src_project_files_dir, dst_project_files_dir)

def unpack_project(
    path_to_zip, new_root=None, database_only=None, database_name=None
):
    """Unpack project zip file to recreate project.

    Args:
        path_to_zip (str): Path to zip which was created using 'pack_project'
            function.
        new_root (str): Optional way how to set different root path for
            unpacked project.
        database_only (Optional[bool]): Unpack only database from zip.
        database_name (str): Name of database where project will be
            recreated.
    """

    if database_only is None:
        database_only = False

    print("Unpacking project from zip {}".format(path_to_zip))
    if not os.path.exists(path_to_zip):
        print("Zip file does not exist: {}".format(path_to_zip))
        return

    tmp_dir = tempfile.mkdtemp(prefix="unpack_")
    print("Zip is extracted to temp: {}".format(tmp_dir))
    with zipfile.ZipFile(path_to_zip, "r") as zip_stream:
        if database_only:
            for filename in (
                "{}.json".format(METADATA_FILE_NAME),
                "{}.json".format(DOCUMENTS_FILE_NAME),
            ):
                zip_stream.extract(filename, tmp_dir)
        else:
            zip_stream.extractall(tmp_dir)

    metadata_json_path = os.path.join(tmp_dir, METADATA_FILE_NAME + ".json")
    with open(metadata_json_path, "r") as stream:
        metadata = json.load(stream)

    docs_json_path = os.path.join(tmp_dir, DOCUMENTS_FILE_NAME + ".json")
    docs = load_json_file(docs_json_path)

    low_platform = platform.system().lower()
    project_name = metadata["project_name"]
    root_path = metadata["root"].get(low_platform)

    # Drop existing collection and recreate documents
    replace_project_documents(project_name, docs, database_name)
    print("Creating project documents ({})".format(len(docs)))

    # Skip change of root if it is the same as the one stored in metadata
    if (
        new_root
        and (os.path.normpath(new_root) == os.path.normpath(root_path))
    ):
        new_root = None

    if new_root:
        print("Using different root path {}".format(new_root))
        root_path = new_root

        project_doc = get_project_document(project_name)
        roots = project_doc["config"]["roots"]
        key = tuple(roots.keys())[0]
        update_key = "config.roots.{}.{}".format(key, low_platform)
        collection = get_project_connection(project_name, database_name)
        collection.update_one(
            {"_id": project_doc["_id"]},
            {"$set": {
                update_key: new_root
            }}
        )

    _unpack_project_files(tmp_dir, root_path, project_name)

    # Cleanup
    print("Cleaning up")
    shutil.rmtree(tmp_dir)
    print("*** Unpack finished ***")
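The inverse operation, again with hypothetical values; a differing 'new_root' also rewrites the single root in the recreated project document:

    unpack_project(
        "/tmp/backups/demo_project.zip",
        new_root="/mnt/projects"
    )
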
108
client/ayon_core/lib/pype_info.py
Normal file

@@ -0,0 +1,108 @@
import os
import json
import datetime
import platform
import getpass
import socket

from openpype import AYON_SERVER_ENABLED
from openpype.settings.lib import get_local_settings
from .execute import get_openpype_execute_args
from .local_settings import get_local_site_id
from .openpype_version import (
    is_running_from_build,
    get_openpype_version,
    get_build_version
)


def get_openpype_info():
    """Information about currently used Pype process."""
    executable_args = get_openpype_execute_args()
    if is_running_from_build():
        version_type = "build"
    else:
        version_type = "code"

    return {
        "build_version": get_build_version(),
        "version": get_openpype_version(),
        "version_type": version_type,
        "executable": executable_args[-1],
        "pype_root": os.environ["OPENPYPE_REPOS_ROOT"],
        "mongo_url": os.environ["OPENPYPE_MONGO"]
    }


def get_ayon_info():
    executable_args = get_openpype_execute_args()
    if is_running_from_build():
        version_type = "build"
    else:
        version_type = "code"
    return {
        "build_version": get_build_version(),
        "version_type": version_type,
        "executable": executable_args[-1],
        "ayon_root": os.environ["AYON_ROOT"],
        "server_url": os.environ["AYON_SERVER_URL"]
    }


def get_workstation_info():
    """Basic information about workstation."""
    host_name = socket.gethostname()
    try:
        host_ip = socket.gethostbyname(host_name)
    except socket.gaierror:
        host_ip = "127.0.0.1"

    return {
        "hostname": host_name,
        "hostip": host_ip,
        "username": getpass.getuser(),
        "system_name": platform.system(),
        "local_id": get_local_site_id()
    }


def get_all_current_info():
    """All information about current process in one dictionary."""

    output = {
        "workstation": get_workstation_info(),
        "env": os.environ.copy(),
        "local_settings": get_local_settings()
    }
    if AYON_SERVER_ENABLED:
        output["ayon"] = get_ayon_info()
    else:
        output["openpype"] = get_openpype_info()
    return output


def extract_pype_info_to_file(dirpath):
    """Extract all current info to a file.

    It is possible to define only a directory path. The filename is
    concatenated from the pype version, workstation site id and a timestamp.

    Args:
        dirpath (str): Path to directory where file will be stored.

    Returns:
        filepath (str): Full path to file where data were extracted.
    """
    filename = "{}_{}_{}.json".format(
        get_openpype_version(),
        get_local_site_id(),
        datetime.datetime.now().strftime("%y%m%d%H%M%S")
    )
    filepath = os.path.join(dirpath, filename)
    data = get_all_current_info()
    if not os.path.exists(dirpath):
        os.makedirs(dirpath)

    with open(filepath, "w") as file_stream:
        json.dump(data, file_stream, indent=4)
    return filepath
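A small usage sketch; the resulting filename embeds version, site id and timestamp as described in the docstring (the printed path is illustrative):

    filepath = extract_pype_info_to_file("/tmp/diagnostics")
    print(filepath)
    # e.g. /tmp/diagnostics/3.15.0_<site-id>_<timestamp>.json
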
44
client/ayon_core/lib/python_2_comp.py
Normal file

@@ -0,0 +1,44 @@
import weakref


WeakMethod = getattr(weakref, "WeakMethod", None)

if WeakMethod is None:
    class _WeakCallable:
        def __init__(self, obj, func):
            self.im_self = obj
            self.im_func = func

        def __call__(self, *args, **kws):
            if self.im_self is None:
                return self.im_func(*args, **kws)
            else:
                return self.im_func(self.im_self, *args, **kws)

    class WeakMethod:
        """Wraps a function or, more importantly, a bound method in
        a way that allows a bound method's object to be GCed, while
        providing the same interface as a normal weak reference."""

        def __init__(self, fn):
            try:
                self._obj = weakref.ref(fn.im_self)
                self._meth = fn.im_func
            except AttributeError:
                # It's not a bound method
                self._obj = None
                self._meth = fn

        def __call__(self):
            if self._dead():
                return None
            return _WeakCallable(self._getobj(), self._meth)

        def _dead(self):
            return self._obj is not None and self._obj() is None

        def _getobj(self):
            if self._obj is None:
                return None
            return self._obj()
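An illustrative sketch of the compat shim's behavior; calling the weak reference yields a callable proxy until the bound object is garbage collected (Python 3's weakref.WeakMethod behaves the same way):

    class Counter:
        def bump(self):
            return 1

    obj = Counter()
    ref = WeakMethod(obj.bump)
    print(ref()())  # -> 1 while 'obj' is alive
    del obj         # after collection, ref() returns None
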
299
client/ayon_core/lib/python_module_tools.py
Normal file

@@ -0,0 +1,299 @@
import os
import sys
import types
import importlib
import inspect
import logging

import six

log = logging.getLogger(__name__)


def import_filepath(filepath, module_name=None):
    """Import python file as python module.

    Python 2 and Python 3 compatibility.

    Args:
        filepath(str): Path to python file.
        module_name(str): Name of loaded module. Only for Python 3. By
            default is filled with filename of filepath.
    """
    if module_name is None:
        module_name = os.path.splitext(os.path.basename(filepath))[0]

    # Make sure it is not 'unicode' in Python 2
    module_name = str(module_name)

    # Prepare module object where content of file will be parsed
    module = types.ModuleType(module_name)
    module.__file__ = filepath

    if six.PY3:
        # Use loader so module has full specs
        module_loader = importlib.machinery.SourceFileLoader(
            module_name, filepath
        )
        module_loader.exec_module(module)
    else:
        # Execute module code and store content to module
        with open(filepath) as _stream:
            # Execute content and store it to module object
            six.exec_(_stream.read(), module.__dict__)

    return module

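A minimal usage sketch (the path is hypothetical):

    module = import_filepath("/studio/scripts/collect_scene.py")
    print(module.__name__)  # -> 'collect_scene'
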
def modules_from_path(folder_path):
    """Get python scripts as modules from a path.

    Arguments:
        folder_path (str): Path to folder containing python scripts.

    Returns:
        tuple<list, list>: First list contains successfully imported modules
            and second list contains tuples of path and exception.
    """
    crashed = []
    modules = []
    output = (modules, crashed)
    # Just skip and return empty list if path is not set
    if not folder_path:
        return output

    # Do not allow relative imports
    if folder_path.startswith("."):
        log.warning((
            "BUG: Relative paths are not allowed for security reasons. {}"
        ).format(folder_path))
        return output

    folder_path = os.path.normpath(folder_path)

    if not os.path.isdir(folder_path):
        log.warning("Not a directory path: {}".format(folder_path))
        return output

    for filename in os.listdir(folder_path):
        # Ignore files which start with underscore
        if filename.startswith("_"):
            continue

        mod_name, mod_ext = os.path.splitext(filename)
        if not mod_ext == ".py":
            continue

        full_path = os.path.join(folder_path, filename)
        if not os.path.isfile(full_path):
            continue

        try:
            module = import_filepath(full_path, mod_name)
            modules.append((full_path, module))

        except Exception:
            crashed.append((full_path, sys.exc_info()))
            log.warning(
                "Failed to load path: \"{0}\"".format(full_path),
                exc_info=True
            )
            continue

    return output

def recursive_bases_from_class(klass):
    """Extract all bases from entered class."""
    result = []
    bases = klass.__bases__
    result.extend(bases)
    for base in bases:
        result.extend(recursive_bases_from_class(base))
    return result


def classes_from_module(superclass, module):
    """Return plug-ins from module.

    Arguments:
        superclass (superclass): Superclass of subclasses to look for
        module (types.ModuleType): Imported module from which to
            parse valid Avalon plug-ins.

    Returns:
        List of plug-ins, or empty list if none is found.

    """

    classes = list()
    for name in dir(module):
        # It could be anything at this point
        obj = getattr(module, name)
        if not inspect.isclass(obj) or obj is superclass:
            continue

        if issubclass(obj, superclass):
            classes.append(obj)

    return classes

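An illustrative sketch of the collector above (the class names are made up; the superclass itself is excluded from the result):

    import sys

    class BasePlugin(object):
        pass

    class LoadModel(BasePlugin):
        pass

    this_module = sys.modules[__name__]
    print(classes_from_module(BasePlugin, this_module))  # -> [LoadModel]
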
def _import_module_from_dirpath_py2(dirpath, module_name, dst_module_name):
    """Import passed dirpath as python module using `imp`."""
    if dst_module_name:
        full_module_name = "{}.{}".format(dst_module_name, module_name)
        dst_module = sys.modules[dst_module_name]
    else:
        full_module_name = module_name
        dst_module = None

    if full_module_name in sys.modules:
        return sys.modules[full_module_name]

    import imp

    fp, pathname, description = imp.find_module(module_name, [dirpath])
    module = imp.load_module(full_module_name, fp, pathname, description)
    if dst_module is not None:
        setattr(dst_module, module_name, module)

    return module


def _import_module_from_dirpath_py3(dirpath, module_name, dst_module_name):
    """Import passed dirpath as python module using Python 3 modules."""
    if dst_module_name:
        full_module_name = "{}.{}".format(dst_module_name, module_name)
        dst_module = sys.modules[dst_module_name]
    else:
        full_module_name = module_name
        dst_module = None

    # Skip import if is already imported
    if full_module_name in sys.modules:
        return sys.modules[full_module_name]

    import importlib.util
    from importlib._bootstrap_external import PathFinder

    # Find loader for passed path and name
    loader = PathFinder.find_module(full_module_name, [dirpath])

    # Load specs of module
    spec = importlib.util.spec_from_loader(
        full_module_name, loader, origin=dirpath
    )

    # Create module based on specs
    module = importlib.util.module_from_spec(spec)

    # Store module to destination module and `sys.modules`
    # WARNING this must be done before module execution
    if dst_module is not None:
        setattr(dst_module, module_name, module)

    sys.modules[full_module_name] = module

    # Execute module import
    loader.exec_module(module)

    return module


def import_module_from_dirpath(dirpath, folder_name, dst_module_name=None):
    """Import passed directory as a python module.

    Python 2 and 3 compatible.

    The imported module can be assigned as a child attribute of an already
    loaded module from `sys.modules` if it has support of `setattr`. That is
    not default behavior of python modules so the parent module must be a
    custom module with that ability.

    It is not possible to reimport an already cached module. If you need to
    reimport a module you have to remove it from caches manually.

    Args:
        dirpath(str): Parent directory path of loaded folder.
        folder_name(str): Folder name which should be imported inside passed
            directory.
        dst_module_name(str): Parent module name under which the loaded
            module can be added.
    """
    if six.PY3:
        module = _import_module_from_dirpath_py3(
            dirpath, folder_name, dst_module_name
        )
    else:
        module = _import_module_from_dirpath_py2(
            dirpath, folder_name, dst_module_name
        )
    return module

def is_func_signature_supported(func, *args, **kwargs):
    """Check if a function signature supports passed args and kwargs.

    This check does not actually call the function, it just looks at whether
    the function can be called with the arguments.

    Notes:
        This does NOT check if the function would work with passed
        arguments, only if they can be passed in. If the function has
        *args, **kwargs in parameters, this will always return 'True'.

    Example:
        >>> def my_function(my_number):
        ...     return my_number + 1
        ...
        >>> is_func_signature_supported(my_function, 1)
        True
        >>> is_func_signature_supported(my_function, 1, 2)
        False
        >>> is_func_signature_supported(my_function, my_number=1)
        True
        >>> is_func_signature_supported(my_function, number=1)
        False
        >>> is_func_signature_supported(my_function, "string")
        True
        >>> def my_other_function(*args, **kwargs):
        ...     my_function(*args, **kwargs)
        ...
        >>> is_func_signature_supported(
        ...     my_other_function,
        ...     "string",
        ...     1,
        ...     other=None
        ... )
        True

    Args:
        func (Callable): A function where the signature should be tested.
        *args (Any): Positional arguments for function signature.
        **kwargs (Any): Keyword arguments for function signature.

    Returns:
        bool: Function can pass in arguments.
    """

    if hasattr(inspect, "signature"):
        # Python 3 using 'Signature' object where we try to bind arg
        # or kwarg. Using signature is recommended approach based on
        # documentation.
        sig = inspect.signature(func)
        try:
            sig.bind(*args, **kwargs)
            return True
        except TypeError:
            pass

    else:
        # In Python 2 'signature' is not available so 'getcallargs' is used
        # - 'getcallargs' is marked as deprecated since Python 3.0
        try:
            inspect.getcallargs(func, *args, **kwargs)
            return True
        except TypeError:
            pass
    return False
204
client/ayon_core/lib/terminal.py
Normal file

@@ -0,0 +1,204 @@
# -*- coding: utf-8 -*-
"""Package helping with colorizing and formatting terminal output."""
# ::
#    //.  ...   ..      ///.     //.
#   ///\\\ \\\  \\    ///\\\    ///
#  ///  \\  \\\ \\   ///  \\   /// //
#  \\\  //   \\\ \\  \\\  //   \\\//  ./
#   \\\//     \\\//   \\\//     \\\' //
#    \\\       \\\     \\\       \\\//
#     '''       '''     '''       '''
#   ..---===[[ PyP3 Setup ]]===---...
#
import re
import time
import threading


class Terminal:
    """Class formatting messages using `blessed` to specific visual tokens.

    If :mod:`blessed` is not found, it will still work, but without colors.

    Depends on :mod:`blessed`.
    Using **OPENPYPE_LOG_NO_COLORS** environment variable.
    """

    # Is Terminal initialized
    _initialized = False
    # Thread lock for initialization to avoid race conditions
    _init_lock = threading.Lock()
    # Use colorized output
    use_colors = True
    # Output message replacements mapping - set on initialization
    _sdict = {}

    @staticmethod
    def _initialize():
        """Initialize Terminal class as object.

        First check if colorized output is disabled by the environment
        variable `OPENPYPE_LOG_NO_COLORS` value. By default colorized output
        is turned on.

        Then try to import the python module that does the color magic and
        create its terminal object. Colorized output is not used if import
        of the python module or terminal object creation fails.

        Set `_initialized` attribute to `True` when done.
        """

        from openpype.lib import env_value_to_bool
        log_no_colors = env_value_to_bool(
            "OPENPYPE_LOG_NO_COLORS", default=None
        )
        if log_no_colors is not None:
            Terminal.use_colors = not log_no_colors

        if not Terminal.use_colors:
            Terminal._initialized = True
            return

        try:
            # Try to import `blessed` module and create `Terminal` object
            import blessed
            term = blessed.Terminal()

        except Exception:
            # Do not use colors if crashed
            Terminal.use_colors = False
            print(
                "Module `blessed` failed on import or terminal creation."
                " Pype terminal won't use colors."
            )
            Terminal._initialized = True
            return

        # shortcuts for blessed codes
        _SB = term.bold
        _RST = ""
        _LR = term.tomato2
        _LG = term.aquamarine3
        _LB = term.turquoise2
        _LM = term.slateblue2
        _LY = term.gold
        _R = term.red
        _G = term.green
        _B = term.blue
        _C = term.cyan
        _Y = term.yellow
        _W = term.white

        # dictionary replacing string sequences with colorized ones
        Terminal._sdict = {
            r">>> ": _SB + _LG + r">>> " + _RST,
            r"!!!(?!\sCRI|\sERR)": _SB + _R + r"!!! " + _RST,
            r"\-\-\- ": _SB + _C + r"--- " + _RST,
            r"\*\*\*(?!\sWRN)": _SB + _LY + r"***" + _RST,
            r"\*\*\* WRN": _SB + _LY + r"*** WRN" + _RST,
            r" \- ": _SB + _LY + r" - " + _RST,
            r"\[ ": _SB + _LG + r"[ " + _RST,
            r" \]": _SB + _LG + r" ]" + _RST,
            r"{": _LG + r"{",
            r"}": r"}" + _RST,
            r"\(": _LY + r"(",
            r"\)": r")" + _RST,
            r"^\.\.\. ": _SB + _LR + r"... " + _RST,
            r"!!! ERR: ":
                _SB + _LR + r"!!! ERR: " + _RST,
            r"!!! CRI: ":
                _SB + _R + r"!!! CRI: " + _RST,
            r"(?i)failed": _SB + _LR + "FAILED" + _RST,
            r"(?i)error": _SB + _LR + "ERROR" + _RST
        }

        Terminal._SB = _SB
        Terminal._RST = _RST
        Terminal._LR = _LR
        Terminal._LG = _LG
        Terminal._LB = _LB
        Terminal._LM = _LM
        Terminal._LY = _LY
        Terminal._R = _R
        Terminal._G = _G
        Terminal._B = _B
        Terminal._C = _C
        Terminal._Y = _Y
        Terminal._W = _W

        Terminal._initialized = True

    @staticmethod
    def _multiple_replace(text, adict):
        """Replace multiple tokens defined in dict.

        Find and replace all occurrences of strings defined in dict in the
        supplied string.

        Args:
            text (str): string to be searched
            adict (dict): dictionary with `{'search': 'replace'}`

        Returns:
            str: string with replaced tokens

        """
        for r, v in adict.items():
            text = re.sub(r, v, text)

        return text

    @staticmethod
    def echo(message):
        """Print colorized message to stdout.

        Args:
            message (str): Message to be colorized.

        Returns:
            str: Colorized message.

        """
        colorized = Terminal.log(message)
        print(colorized)

        return colorized

    @staticmethod
    def log(message):
        """Return color formatted message.

        If the environment variable `OPENPYPE_LOG_NO_COLORS` is set to
        any value, the message will be formatted but not colorized.

        Args:
            message (str): Message to be colorized.

        Returns:
            str: Colorized message.

        """
        T = Terminal
        # Initialize if not yet initialized and use thread lock to avoid
        # race condition issues
        if not T._initialized:
            # Check if lock is already locked to be sure `_initialize` is
            # not executed multiple times
            if not T._init_lock.locked():
                with T._init_lock:
                    T._initialize()
            else:
                # If lock is locked wait until it is finished
                while T._init_lock.locked():
                    time.sleep(0.1)

        # if we don't want colors, just return the raw message
        if not T.use_colors:
            return message

        message = re.sub(r'\[(.*)\]', '[ ' + T._SB + T._W +
                         r'\1' + T._RST + ' ]', message)
        message = T._multiple_replace(message + T._RST, T._sdict)

        return message
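A brief usage sketch; tokens like ">>> " and "ERROR" in the message are colorized when 'blessed' is available and colors are not disabled:

    Terminal.echo(">>> Publishing [ modelMain ] ...")
    Terminal.echo("!!! ERR: render failed")
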
1454
client/ayon_core/lib/transcoding.py
Normal file

File diff suppressed because it is too large
361
client/ayon_core/lib/usdlib.py
Normal file

@@ -0,0 +1,361 @@
import os
import re
import logging

try:
    from pxr import Usd, UsdGeom, Sdf, Kind
except ImportError:
    # Allow to fall back on Multiverse 6.3.0+ pxr usd library
    from mvpxr import Usd, UsdGeom, Sdf, Kind

from openpype.client import get_project, get_asset_by_name
from openpype.pipeline import Anatomy, get_current_project_name

log = logging.getLogger(__name__)


# The predefined steps order used for bootstrapping USD Shots and Assets.
# These are ordered from strongest to weakest opinions, like in USD.
PIPELINE = {
    "shot": [
        "usdLighting",
        "usdFx",
        "usdSimulation",
        "usdAnimation",
        "usdLayout",
    ],
    "asset": ["usdShade", "usdModel"],
}

def create_asset(
    filepath, asset_name, reference_layers, kind=Kind.Tokens.component
):
    """Create an asset file consisting of a top level layer and sublayers
    for shading and geometry.

    Args:
        filepath (str): Filepath where the asset.usd file will be saved.
        reference_layers (list): USD Files to reference in the asset.
            Note that the bottom layer (first file, like a model) would
            be last in the list. The strongest layer will be the first
            index.
        asset_name (str): The name for the Asset identifier and default prim.
        kind (pxr.Kind): A USD Kind for the root asset.

    """
    # Also see create_asset.py in PixarAnimationStudios/USD endToEnd example

    log.info("Creating asset at %s", filepath)

    # Make the layer ascii - good for readability, plus the file is small
    root_layer = Sdf.Layer.CreateNew(filepath, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)

    # Define a prim for the asset and make it the default for the stage.
    asset_prim = UsdGeom.Xform.Define(stage, "/%s" % asset_name).GetPrim()
    stage.SetDefaultPrim(asset_prim)

    # Let viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # Usually we will "loft up" the kind authored into the exported geometry
    # layer rather than re-stamping here; we'll leave that for a later
    # tutorial, and just be explicit here.
    model = Usd.ModelAPI(asset_prim)
    if kind:
        model.SetKind(kind)

    model.SetAssetName(asset_name)
    model.SetAssetIdentifier("%s/%s.usd" % (asset_name, asset_name))

    # Add references to the asset prim
    references = asset_prim.GetReferences()
    for reference_filepath in reference_layers:
        references.AddReference(reference_filepath)

    stage.GetRootLayer().Save()

def create_shot(filepath, layers, create_layers=False):
    """Create a shot with separate layers for departments.

    Args:
        filepath (str): Filepath where the asset.usd file will be saved.
        layers (list): When provided these will be added verbatim to the
            subLayerPaths layers. When the provided layer paths do not exist
            they are generated using Sdf.Layer.CreateNew
        create_layers (bool): Whether to create the stub layers on disk if
            they do not exist yet.

    Returns:
        str: The saved shot file path

    """
    # Also see create_shot.py in PixarAnimationStudios/USD endToEnd example

    stage = Usd.Stage.CreateNew(filepath)
    log.info("Creating shot at %s" % filepath)

    for layer_path in layers:
        if create_layers and not os.path.exists(layer_path):
            # We use the Sdf API here to quickly create layers. Also, we're
            # using it as a way to author the subLayerPaths as there is no
            # way to do that directly in the Usd API.
            layer_folder = os.path.dirname(layer_path)
            if not os.path.exists(layer_folder):
                os.makedirs(layer_folder)

            Sdf.Layer.CreateNew(layer_path)

        stage.GetRootLayer().subLayerPaths.append(layer_path)

    # Let viewing applications know how to orient a free camera properly
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
    stage.GetRootLayer().Save()

    return filepath

def create_model(filename, asset, variant_subsets):
    """Create a USD Model file.

    For each of the variation paths it will payload the path and set its
    relevant variation name.

    """

    project_name = get_current_project_name()
    asset_doc = get_asset_by_name(project_name, asset)
    assert asset_doc, "Asset not found: %s" % asset

    variants = []
    for subset in variant_subsets:
        prefix = "usdModel"
        if subset.startswith(prefix):
            # Strip off `usdModel_`
            variant = subset[len(prefix):]
        else:
            raise ValueError(
                "Model subsets must start with usdModel: %s" % subset
            )

        path = get_usd_master_path(
            asset=asset_doc, subset=subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename,
        variants=variants,
        variantset="model",
        variant_prim="/root",
        reference_prim="/root/geo",
        as_payload=True,
    )

    UsdGeom.SetStageMetersPerUnit(stage, 1)
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)

    # modelAPI = Usd.ModelAPI(root_prim)
    # modelAPI.SetKind(Kind.Tokens.component)

    # See http://openusd.org/docs/api/class_usd_model_a_p_i.html#details
    # for more on assetInfo
    # modelAPI.SetAssetName(asset)
    # modelAPI.SetAssetIdentifier(asset)

    stage.GetRootLayer().Save()

def create_shade(filename, asset, variant_subsets):
    """Create a master USD shade file for an asset.

    For each available model variation this should generate a reference
    to a `usdShade_{modelVariant}` subset.

    """

    project_name = get_current_project_name()
    asset_doc = get_asset_by_name(project_name, asset)
    assert asset_doc, "Asset not found: %s" % asset

    variants = []

    for subset in variant_subsets:
        prefix = "usdModel"
        if subset.startswith(prefix):
            # Strip off `usdModel_`
            variant = subset[len(prefix):]
        else:
            raise ValueError(
                "Model subsets must start with usdModel: %s" % subset
            )

        shade_subset = re.sub("^usdModel", "usdShade", subset)
        path = get_usd_master_path(
            asset=asset_doc, subset=shade_subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="model", variant_prim="/root"
    )

    stage.GetRootLayer().Save()

def create_shade_variation(filename, asset, model_variant, shade_variants):
    """Create the master Shade file for a specific model variant.

    This should reference all shade variants for the specific model variant.

    """

    project_name = get_current_project_name()
    asset_doc = get_asset_by_name(project_name, asset)
    assert asset_doc, "Asset not found: %s" % asset

    variants = []
    for variant in shade_variants:
        subset = "usdShade_{model}_{shade}".format(
            model=model_variant, shade=variant
        )
        path = get_usd_master_path(
            asset=asset_doc, subset=subset, representation="usd"
        )
        variants.append((variant, path))

    stage = _create_variants_file(
        filename, variants=variants, variantset="shade", variant_prim="/root"
    )

    stage.GetRootLayer().Save()

def _create_variants_file(
    filename,
    variants,
    variantset,
    default_variant=None,
    variant_prim="/root",
    reference_prim=None,
    set_default_variant=True,
    as_payload=False,
    skip_variant_on_single_file=True,
):

    root_layer = Sdf.Layer.CreateNew(filename, args={"format": "usda"})
    stage = Usd.Stage.Open(root_layer)

    root_prim = stage.DefinePrim(variant_prim)
    stage.SetDefaultPrim(root_prim)

    def _reference(path):
        """Reference/Payload path depending on function arguments"""

        if reference_prim:
            prim = stage.DefinePrim(reference_prim)
        else:
            prim = root_prim

        if as_payload:
            # Payload
            prim.GetPayloads().AddPayload(Sdf.Payload(path))
        else:
            # Reference
            prim.GetReferences().AddReference(Sdf.Reference(path))

    assert variants, "Must have variants, got: %s" % variants

    log.info(filename)

    if skip_variant_on_single_file and len(variants) == 1:
        # Reference directly, no variants
        variant_path = variants[0][1]
        _reference(variant_path)

        log.info("Non-variants..")
        log.info("Path: %s" % variant_path)

    else:
        # Variants
        append = Usd.ListPositionBackOfAppendList
        variant_set = root_prim.GetVariantSets().AddVariantSet(
            variantset, append
        )

        for variant, variant_path in variants:

            if default_variant is None:
                default_variant = variant

            variant_set.AddVariant(variant, append)
            variant_set.SetVariantSelection(variant)
            with variant_set.GetVariantEditContext():
                _reference(variant_path)

            log.info("Variants..")
            log.info("Variant: %s" % variant)
            log.info("Path: %s" % variant_path)

        if set_default_variant:
            variant_set.SetVariantSelection(default_variant)

    return stage

def get_usd_master_path(asset, subset, representation):
    """Get the filepath for a .usd file of a subset.

    This will return the path to an unversioned master file generated by
    `usd_master_file.py`.

    """

    project_name = get_current_project_name()
    anatomy = Anatomy(project_name)
    project_doc = get_project(
        project_name,
        fields=["name", "data.code"]
    )

    if isinstance(asset, dict) and "name" in asset:
        # Allow explicitly passing asset document
        asset_doc = asset
    else:
        asset_doc = get_asset_by_name(project_name, asset, fields=["name"])

    template_obj = anatomy.templates_obj["publish"]["path"]
    path = template_obj.format_strict(
        {
            "project": {
                "name": project_name,
                "code": project_doc.get("data", {}).get("code")
            },
            "folder": {
                "name": asset_doc["name"],
            },
            "asset": asset_doc["name"],
            "subset": subset,
            "representation": representation,
            "version": 0,  # stub version zero
        }
    )

    # Remove the version folder
    subset_folder = os.path.dirname(os.path.dirname(path))
    master_folder = os.path.join(subset_folder, "master")
    fname = "{0}.{1}".format(subset, representation)

    return os.path.join(master_folder, fname).replace("\\", "/")

def parse_avalon_uri(uri):
    # URI Pattern: avalon://{asset}/{subset}.{ext}
    pattern = r"avalon://(?P<asset>[^/.]*)/(?P<subset>[^/]*)\.(?P<ext>.*)"
    if uri.startswith("avalon://"):
        match = re.match(pattern, uri)
        if match:
            return match.groupdict()
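A doctest-style sketch of the URI parsing above (the URI is illustrative):

    >>> parse_avalon_uri("avalon://hero/modelMain.usd")
    {'asset': 'hero', 'subset': 'modelMain', 'ext': 'usd'}
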
528
client/ayon_core/lib/vendor_bin_utils.py
Normal file

@@ -0,0 +1,528 @@
import os
import logging
import platform
import subprocess

from openpype import AYON_SERVER_ENABLED

log = logging.getLogger("Vendor utils")


class ToolNotFoundError(Exception):
    """Raised when tool arguments are not found."""


class CachedToolPaths:
    """Cache already used and discovered tools and their executables.

    Discovering a path can take some time and can trigger subprocesses so
    it's better to cache the paths on first get.
    """

    _cached_paths = {}

    @classmethod
    def is_tool_cached(cls, tool):
        return tool in cls._cached_paths

    @classmethod
    def get_executable_path(cls, tool):
        return cls._cached_paths.get(tool)

    @classmethod
    def cache_executable_path(cls, tool, path):
        cls._cached_paths[tool] = path


def is_file_executable(filepath):
    """Check if filepath leads to an executable file.

    Args:
        filepath(str): Full path to file.
    """
    if not filepath:
        return False

    if os.path.isfile(filepath):
        if os.access(filepath, os.X_OK):
            return True

        log.info(
            "Filepath is not available for execution \"{}\"".format(filepath)
        )
    return False

def find_executable(executable):
    """Find full path to executable.

    Also tries additional extensions if the passed executable does not
    contain one.

    Paths searched for the executable are defined by the 'PATH' environment
    variable, 'os.confstr("CS_PATH")' or 'os.defpath'.

    Args:
        executable(str): Name of executable with or without extension. Can
            be path to file.

    Returns:
        Union[str, None]: Full path to executable with extension which was
            found, otherwise None.
    """

    # Skip if passed path is file
    if is_file_executable(executable):
        return executable

    low_platform = platform.system().lower()
    _, ext = os.path.splitext(executable)

    # Prepare extensions to check
    exts = set()
    if ext:
        exts.add(ext.lower())

    else:
        # Add other possible extension variants only if passed executable
        # does not have any
        if low_platform == "windows":
            exts |= {".exe", ".ps1", ".bat"}
            for ext in os.getenv("PATHEXT", "").split(os.pathsep):
                exts.add(ext.lower())

        else:
            exts |= {".sh"}

    # Executable is a path but there may be missing extension
    # - this can happen primarily on windows where
    #   e.g. "ffmpeg" should be "ffmpeg.exe"
    exe_dir, exe_filename = os.path.split(executable)
    if exe_dir and os.path.isdir(exe_dir):
        for filename in os.listdir(exe_dir):
            filepath = os.path.join(exe_dir, filename)
            basename, ext = os.path.splitext(filename)
            if (
                basename == exe_filename
                and ext.lower() in exts
                and is_file_executable(filepath)
            ):
                return filepath

    # Get paths where to look for executable
    path_str = os.environ.get("PATH", None)
    if path_str is None:
        if hasattr(os, "confstr"):
            path_str = os.confstr("CS_PATH")
        elif hasattr(os, "defpath"):
            path_str = os.defpath

    if not path_str:
        return None

    paths = path_str.split(os.pathsep)
    for path in paths:
        if not os.path.isdir(path):
            continue
        for filename in os.listdir(path):
            filepath = os.path.abspath(os.path.join(path, filename))
            # Filename matches executable exactly
            if filename == executable and is_file_executable(filepath):
                return filepath

            basename, ext = os.path.splitext(filename)
            if (
                basename == executable
                and ext.lower() in exts
                and is_file_executable(filepath)
            ):
                return filepath

    return None

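A hedged usage sketch; the result depends on the machine's PATH:

    ffmpeg_path = find_executable("ffmpeg")
    if ffmpeg_path:
        print(ffmpeg_path)  # e.g. /usr/bin/ffmpeg or C:\ffmpeg\bin\ffmpeg.exe
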
def get_vendor_bin_path(bin_app):
    """Path to OpenPype vendorized binaries.

    Vendorized executables are expected in a specific hierarchy inside build
    or in code source:

    "{OPENPYPE_ROOT}/vendor/bin/{name of vendorized app}/{platform}"

    Args:
        bin_app (str): Name of vendorized application.

    Returns:
        str: Path to vendorized binaries folder.
    """

    return os.path.join(
        os.environ["OPENPYPE_ROOT"],
        "vendor",
        "bin",
        bin_app,
        platform.system().lower()
    )

def find_tool_in_custom_paths(paths, tool, validation_func=None):
    """Find a tool executable in custom paths.

    Args:
        paths (Iterable[str]): Iterable of paths where to look for tool.
        tool (str): Name of tool (binary file) to find in passed paths.
        validation_func (Function): Custom validation function of path.
            Function must expect one argument which is path to executable.
            If not passed, only 'find_executable' is used to identify
            if a path is valid.

    Returns:
        Union[str, None]: Path to validated executable or None if it was
            not found.
    """

    for path in paths:
        # Skip empty strings
        if not path:
            continue

        # Handle cases when path is just an executable
        # - it allows to use an executable from PATH
        # - basename must match 'tool' value (without extension)
        extless_path, ext = os.path.splitext(path)
        if extless_path == tool:
            executable_path = find_executable(tool)
            if executable_path and (
                validation_func is None
                or validation_func(executable_path)
            ):
                return executable_path
            continue

        # Normalize path because it should be a path and check if it exists
        normalized = os.path.normpath(path)
        if not os.path.exists(normalized):
            continue

        # Note: Path can be both file and directory

        # If path is a file validate it
        if os.path.isfile(normalized):
            basename, ext = os.path.splitext(os.path.basename(path))
            # Check if the filename actually has the same base name as 'tool'
            if basename == tool:
                executable_path = find_executable(normalized)
                if executable_path and (
                    validation_func is None
                    or validation_func(executable_path)
                ):
                    return executable_path

        # Check if path is a directory and look for tool inside the dir
        if os.path.isdir(normalized):
            executable_path = find_executable(os.path.join(normalized, tool))
            if executable_path and (
                validation_func is None
                or validation_func(executable_path)
            ):
                return executable_path
    return None

def _check_args_returncode(args):
    try:
        kwargs = {}
        if platform.system().lower() == "windows":
            kwargs["creationflags"] = (
                subprocess.CREATE_NEW_PROCESS_GROUP
                | getattr(subprocess, "DETACHED_PROCESS", 0)
                | getattr(subprocess, "CREATE_NO_WINDOW", 0)
            )

        if hasattr(subprocess, "DEVNULL"):
            proc = subprocess.Popen(
                args,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
                **kwargs
            )
            proc.wait()
        else:
            with open(os.devnull, "w") as devnull:
                proc = subprocess.Popen(
                    args, stdout=devnull, stderr=devnull, **kwargs
                )
                proc.wait()

    except Exception:
        return False
    return proc.returncode == 0


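# A minimal sketch (illustrative only) of what '_check_args_returncode'
# answers: whether a command can be launched and exits with code 0. The
# ["python", "--version"] command is just an example and assumes a Python
# interpreter named "python" is available on PATH.
def _example_check_args_returncode():
    if _check_args_returncode(["python", "--version"]):
        print("Command executed successfully.")

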
def _oiio_executable_validation(args):
    """Validate that the OIIO tool executable can be executed.

    Validation has 2 steps. First, 'find_executable' is used to fill in a
    possibly missing extension or directory, then the executable is launched
    to validate that it can run. The '--help' argument is used for that
    because it is fast and does not need any other inputs.

    Any possible crash from missing libraries or an invalid build should be
    caught.

    The main reason is to validate that the executable can run on the OS at
    all, which can be an issue on Linux machines.

    Note:
        It does not validate that the executable is really an OIIO tool
        which should be used.

    Args:
        args (Union[str, list[str]]): Arguments to launch tool or
            path to tool executable.

    Returns:
        bool: Filepath is valid executable.
    """

    if not args:
        return False

    if not isinstance(args, list):
        filepath = find_executable(args)
        if not filepath:
            return False
        args = [filepath]
    return _check_args_returncode(args + ["--help"])


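# A minimal sketch (illustrative only) of both input forms the validator
# accepts: a plain string is resolved through 'find_executable' first, while
# a list is used as launch arguments directly. The absolute path shown is
# hypothetical.
def _example_oiio_validation():
    is_valid_from_name = _oiio_executable_validation("oiiotool")
    is_valid_from_args = _oiio_executable_validation(["/usr/bin/oiiotool"])
    return is_valid_from_name, is_valid_from_args

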
def _get_ayon_oiio_tool_args(tool_name):
    try:
        # Use 'ayon-third-party' addon to get oiio arguments
        from ayon_third_party import get_oiio_arguments
    except Exception:
        print("!!! Failed to import 'ayon_third_party' addon.")
        return None

    try:
        return get_oiio_arguments(tool_name)
    except Exception as exc:
        print("!!! Failed to get OpenImageIO args. Reason: {}".format(exc))
        return None


def get_oiio_tools_path(tool="oiiotool"):
    """Path to OpenImageIO tool executables.

    On Windows it adds the '.exe' extension if missing from tool argument.

    Args:
        tool (str): Tool name 'oiiotool', 'maketx', etc.
            Default is "oiiotool".

    Returns:
        Union[str, None]: Path to the tool executable, or None if it was
            not found.
    """

    if CachedToolPaths.is_tool_cached(tool):
        return CachedToolPaths.get_executable_path(tool)

    if AYON_SERVER_ENABLED:
        args = _get_ayon_oiio_tool_args(tool)
        if args:
            if len(args) > 1:
                raise ValueError(
                    "AYON oiio arguments consist of multiple arguments."
                )
            tool_executable_path = args[0]
            CachedToolPaths.cache_executable_path(tool, tool_executable_path)
            return tool_executable_path

    custom_paths_str = os.environ.get("OPENPYPE_OIIO_PATHS") or ""
    tool_executable_path = find_tool_in_custom_paths(
        custom_paths_str.split(os.pathsep),
        tool,
        _oiio_executable_validation
    )

    if not tool_executable_path:
        oiio_dir = get_vendor_bin_path("oiio")
        if platform.system().lower() == "linux":
            oiio_dir = os.path.join(oiio_dir, "bin")
        default_path = find_executable(os.path.join(oiio_dir, tool))
        if default_path and _oiio_executable_validation(default_path):
            tool_executable_path = default_path

    # Look to PATH for the tool
    if not tool_executable_path:
        from_path = find_executable(tool)
        if from_path and _oiio_executable_validation(from_path):
            tool_executable_path = from_path

    CachedToolPaths.cache_executable_path(tool, tool_executable_path)
    return tool_executable_path


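# A minimal usage sketch (illustrative only): resolve 'maketx' through the
# full lookup chain (cache, AYON addon, 'OPENPYPE_OIIO_PATHS', vendorized
# binaries, PATH). The result may be None when no valid executable exists.
def _example_oiio_tools_path():
    maketx_path = get_oiio_tools_path("maketx")
    print("Resolved maketx executable: {}".format(maketx_path))

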
def get_oiio_tool_args(tool_name, *extra_args):
    """Arguments to launch OpenImageIO tool.

    Args:
        tool_name (str): Tool name 'oiiotool', 'maketx', etc.
        *extra_args (str): Extra arguments to append after tool arguments.

    Returns:
        list[str]: List of arguments.
    """

    extra_args = list(extra_args)

    if AYON_SERVER_ENABLED:
        args = _get_ayon_oiio_tool_args(tool_name)
        if args:
            return args + extra_args

    path = get_oiio_tools_path(tool_name)
    if path:
        return [path] + extra_args
    raise ToolNotFoundError(
        "OIIO '{}' tool not found.".format(tool_name)
    )


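# A minimal usage sketch (illustrative only): build 'oiiotool' launch
# arguments and run them. The input/output filenames are hypothetical;
# '--ch' and '-o' are real oiiotool options. Raises ToolNotFoundError when
# no oiiotool is available.
def _example_oiio_tool_args():
    args = get_oiio_tool_args(
        "oiiotool", "input.exr", "--ch", "R,G,B", "-o", "output.exr"
    )
    subprocess.call(args)

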
def _ffmpeg_executable_validation(args):
    """Validate that the FFmpeg tool executable can be executed.

    Validation has 2 steps. First, 'find_executable' is used to fill in a
    possibly missing extension or directory, then the executable is launched
    to validate that it can run. The '-version' argument is used for that
    because it is fast and does not need any other inputs.

    Any possible crash from missing libraries or an invalid build should be
    caught.

    The main reason is to validate that the executable can run on the OS at
    all, which can be an issue on Linux machines.

    Note:
        It does not validate that the executable is really an FFmpeg tool.

    Args:
        args (Union[str, list[str]]): Arguments to launch tool or
            path to tool executable.

    Returns:
        bool: Filepath is valid executable.
    """

    if not args:
        return False

    if not isinstance(args, list):
        filepath = find_executable(args)
        if not filepath:
            return False
        args = [filepath]
    return _check_args_returncode(args + ["-version"])


def _get_ayon_ffmpeg_tool_args(tool_name):
    try:
        # Use 'ayon-third-party' addon to get ffmpeg arguments
        from ayon_third_party import get_ffmpeg_arguments
    except Exception:
        print("!!! Failed to import 'ayon_third_party' addon.")
        return None

    try:
        return get_ffmpeg_arguments(tool_name)
    except Exception as exc:
        print("!!! Failed to get FFmpeg args. Reason: {}".format(exc))
        return None


def get_ffmpeg_tool_path(tool="ffmpeg"):
    """Path to vendorized FFmpeg executable.

    Args:
        tool (str): Tool name 'ffmpeg', 'ffprobe', etc.
            Default is "ffmpeg".

    Returns:
        Union[str, None]: Full path to the tool executable, or None if it
            was not found.
    """

    if CachedToolPaths.is_tool_cached(tool):
        return CachedToolPaths.get_executable_path(tool)

    if AYON_SERVER_ENABLED:
        args = _get_ayon_ffmpeg_tool_args(tool)
        if args is not None:
            if len(args) > 1:
                raise ValueError(
                    "AYON ffmpeg arguments consist of multiple arguments."
                )
            tool_executable_path = args[0]
            CachedToolPaths.cache_executable_path(tool, tool_executable_path)
            return tool_executable_path

    custom_paths_str = os.environ.get("OPENPYPE_FFMPEG_PATHS") or ""
    tool_executable_path = find_tool_in_custom_paths(
        custom_paths_str.split(os.pathsep),
        tool,
        _ffmpeg_executable_validation
    )

    if not tool_executable_path:
        ffmpeg_dir = get_vendor_bin_path("ffmpeg")
        if platform.system().lower() == "windows":
            ffmpeg_dir = os.path.join(ffmpeg_dir, "bin")
        tool_path = find_executable(os.path.join(ffmpeg_dir, tool))
        if tool_path and _ffmpeg_executable_validation(tool_path):
            tool_executable_path = tool_path

    # Look to PATH for the tool
    if not tool_executable_path:
        from_path = find_executable(tool)
        if from_path and _ffmpeg_executable_validation(from_path):
            tool_executable_path = from_path

    CachedToolPaths.cache_executable_path(tool, tool_executable_path)
    return tool_executable_path


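# A minimal usage sketch (illustrative only): resolve 'ffprobe' the same way
# 'ffmpeg' is resolved, honoring 'OPENPYPE_FFMPEG_PATHS' before falling back
# to vendorized binaries and PATH. The result may be None.
def _example_ffmpeg_tool_path():
    ffprobe_path = get_ffmpeg_tool_path("ffprobe")
    print("Resolved ffprobe executable: {}".format(ffprobe_path))

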
def get_ffmpeg_tool_args(tool_name, *extra_args):
    """Arguments to launch FFmpeg tool.

    Args:
        tool_name (str): Tool name 'ffmpeg', 'ffprobe', etc.
        *extra_args (str): Extra arguments to append after tool arguments.

    Returns:
        list[str]: List of arguments.
    """

    extra_args = list(extra_args)

    if AYON_SERVER_ENABLED:
        args = _get_ayon_ffmpeg_tool_args(tool_name)
        if args:
            return args + extra_args

    executable_path = get_ffmpeg_tool_path(tool_name)
    if executable_path:
        return [executable_path] + extra_args
    raise ToolNotFoundError(
        "FFmpeg '{}' tool not found.".format(tool_name)
    )


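# A minimal usage sketch (illustrative only): build 'ffprobe' arguments and
# run them on a hypothetical input file. '-show_format' and '-of json' are
# real ffprobe options; the filename is made up.
def _example_ffmpeg_tool_args():
    args = get_ffmpeg_tool_args(
        "ffprobe", "-show_format", "-of", "json", "input.mov"
    )
    subprocess.call(args)

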
def is_oiio_supported():
    """Checks if oiiotool is configured for this platform.

    Returns:
        bool: OIIO tool executable is available.
    """

    try:
        args = get_oiio_tool_args("oiiotool")
    except ToolNotFoundError:
        args = None
    if not args:
        log.debug("OIIOTool is not configured or not present.")
        return False
    return _oiio_executable_validation(args)


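# A minimal usage sketch (illustrative only): guard OIIO based processing on
# availability of a working 'oiiotool' before attempting a conversion.
def _example_is_oiio_supported():
    if is_oiio_supported():
        print("oiiotool is available, OIIO based transcoding can be used.")
    else:
        print("oiiotool is unavailable, falling back to ffmpeg.")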