Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 51e57e5edd: "Merge develop"
2279 changed files with 8048 additions and 185750 deletions

.gitmodules (vendored, 2 changed lines)

@@ -1,3 +1,3 @@
[submodule "client/ayon_core/hosts/unreal/integration"]
|
||||
path = client/ayon_core/hosts/unreal/integration
|
||||
path = server_addon/unreal/client/ayon_unreal/integration
|
||||
url = https://github.com/ynput/ayon-unreal-plugin.git
|
||||
|
|
|
|||
|
|
@@ -35,14 +35,14 @@ AYON addons should contain separated logic of specific kind of implementation, s
 - an addon has extra logic when used in a tray
 - an addon may be usable only in the tray
 - abstract methods
-- `tray_init` - initialization triggered after `initialize` when used in `TrayModulesManager` and before `connect_with_addons`
+- `tray_init` - initialization triggered after `initialize` when used in `TrayAddonsManager` and before `connect_with_addons`
 - `tray_menu` - add actions to the tray widget's menu that represent the addon
 - `tray_start` - start of the addon's logic in the tray
 - the addon is initialized and connected with other addons
 - `tray_exit` - addon's cleanup, e.g. stopping and joining threads
-- the order of calls is implementation specific; this order is how it works with `TrayModulesManager`
+- the order of calls is implementation specific; this order is how it works with `TrayAddonsManager`
 - it is recommended to import and use the GUI implementation only in these methods
-- has the attribute `tray_initialized` (bool), which is False by default and is set to True by `TrayModulesManager` after `tray_init`
+- has the attribute `tray_initialized` (bool), which is False by default and is set to True by `TrayAddonsManager` after `tray_init`
 - if an addon has tray-only logic, or logic for both, it should check the `tray_initialized` attribute to decide how to handle each situation

 ### ITrayService
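As a quick illustration of the interface described above, a minimal tray addon might look like the sketch below. Names and the menu action are made up; the base classes are the `AYONAddon`/`ITrayAddon` pair imported elsewhere in this changeset, and the menu is assumed to be a Qt menu object.

```python
from ayon_core.addon import AYONAddon, ITrayAddon


class ExampleTrayAddon(AYONAddon, ITrayAddon):
    """Illustrative tray addon following the documented abstract methods."""

    name = "example_tray"
    version = "0.1.0"

    def initialize(self, settings):
        # Regular (non-tray) initialization, called for every addon.
        self._started = False

    def tray_init(self):
        # Called by TrayAddonsManager after 'initialize' and before
        # 'connect_with_addons'; GUI imports should happen here.
        pass

    def tray_menu(self, tray_menu):
        # Add actions representing the addon to the tray widget's menu.
        action = tray_menu.addAction("Example action")
        action.triggered.connect(self._on_example_action)

    def tray_start(self):
        # Start of the addon's logic in the tray.
        self._started = True

    def tray_exit(self):
        # Cleanup, e.g. stop and join threads.
        self._started = False

    def _on_example_action(self):
        print("Example action triggered")
```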
@ -8,7 +8,6 @@ import inspect
|
|||
import logging
|
||||
import threading
|
||||
import collections
|
||||
|
||||
from uuid import uuid4
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
|
|
@ -38,21 +37,36 @@ IGNORED_DEFAULT_FILENAMES = (
|
|||
"base.py",
|
||||
"interfaces.py",
|
||||
"click_wrap.py",
|
||||
"example_addons",
|
||||
"default_modules",
|
||||
)
|
||||
IGNORED_HOSTS_IN_AYON = {
|
||||
"flame",
|
||||
"harmony",
|
||||
}
|
||||
IGNORED_MODULES_IN_AYON = set()
|
||||
|
||||
# Addons that were moved out of the ayon-core codebase
# - used to log a warning about the missing addon
|
||||
MOVED_ADDON_MILESTONE_VERSIONS = {
|
||||
"aftereffects": VersionInfo(0, 2, 0),
|
||||
"applications": VersionInfo(0, 2, 0),
|
||||
"blender": VersionInfo(0, 2, 0),
|
||||
"celaction": VersionInfo(0, 2, 0),
|
||||
"clockify": VersionInfo(0, 2, 0),
|
||||
"deadline": VersionInfo(0, 2, 0),
|
||||
"flame": VersionInfo(0, 2, 0),
|
||||
"fusion": VersionInfo(0, 2, 0),
|
||||
"harmony": VersionInfo(0, 2, 0),
|
||||
"hiero": VersionInfo(0, 2, 0),
|
||||
"max": VersionInfo(0, 2, 0),
|
||||
"photoshop": VersionInfo(0, 2, 0),
|
||||
"timers_manager": VersionInfo(0, 2, 0),
|
||||
"traypublisher": VersionInfo(0, 2, 0),
|
||||
"tvpaint": VersionInfo(0, 2, 0),
|
||||
"maya": VersionInfo(0, 2, 0),
|
||||
"nuke": VersionInfo(0, 2, 0),
|
||||
"resolve": VersionInfo(0, 2, 0),
|
||||
"royalrender": VersionInfo(0, 2, 0),
|
||||
"substancepainter": VersionInfo(0, 2, 0),
|
||||
"houdini": VersionInfo(0, 3, 0),
|
||||
"unreal": VersionInfo(0, 2, 0),
|
||||
}
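The consumer of this table is not shown in this hunk; as a purely hypothetical sketch, a lookup against it could be used to warn about an addon that now lives in its own package (helper name and message wording are assumptions):

```python
def _warn_about_moved_addon(addon_name, log):
    # Hypothetical helper, not part of this diff.
    milestone_version = MOVED_ADDON_MILESTONE_VERSIONS.get(addon_name)
    if milestone_version is None:
        return
    log.warning(
        "Addon '%s' was moved out of ayon-core and has its own addon"
        " (milestone version %s). Install it separately to keep using it.",
        addon_name, milestone_version
    )
```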
|
||||
|
||||
|
||||
# Inherit from `object` for Python 2 hosts
|
||||
class _ModuleClass(object):
|
||||
"""Fake module class for storing AYON addons.
|
||||
|
|
@ -400,12 +414,6 @@ def _load_addons_in_core(
|
|||
hosts_dir = os.path.join(AYON_CORE_ROOT, "hosts")
|
||||
modules_dir = os.path.join(AYON_CORE_ROOT, "modules")
|
||||
|
||||
ignored_host_names = set(IGNORED_HOSTS_IN_AYON)
|
||||
ignored_module_dir_filenames = (
|
||||
set(IGNORED_DEFAULT_FILENAMES)
|
||||
| IGNORED_MODULES_IN_AYON
|
||||
)
|
||||
|
||||
for dirpath in {hosts_dir, modules_dir}:
|
||||
if not os.path.exists(dirpath):
|
||||
log.warning((
|
||||
|
|
@ -414,10 +422,9 @@ def _load_addons_in_core(
|
|||
continue
|
||||
|
||||
is_in_modules_dir = dirpath == modules_dir
|
||||
ignored_filenames = set()
|
||||
if is_in_modules_dir:
|
||||
ignored_filenames = ignored_module_dir_filenames
|
||||
else:
|
||||
ignored_filenames = ignored_host_names
|
||||
ignored_filenames = set(IGNORED_DEFAULT_FILENAMES)
|
||||
|
||||
for filename in os.listdir(dirpath):
|
||||
# Ignore filenames
|
||||
|
|
@ -483,9 +490,6 @@ def _load_addons_in_core(
|
|||
|
||||
|
||||
def _load_addons():
|
||||
# Support to use 'openpype' imports
|
||||
sys.modules["openpype"] = sys.modules["ayon_core"]
|
||||
|
||||
# Key under which modules will be imported in `sys.modules`
|
||||
modules_key = "openpype_modules"
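The alias above keeps legacy imports working: once `sys.modules["openpype"]` points at `ayon_core`, an `import openpype` resolves to the same package object. A small illustration:

```python
import sys

import ayon_core

# What '_load_addons' does for backwards compatibility.
sys.modules["openpype"] = sys.modules["ayon_core"]

import openpype  # noqa: E402

assert openpype is ayon_core
```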
|
||||
|
||||
|
|
@ -538,6 +542,9 @@ class AYONAddon(object):
|
|||
enabled = True
|
||||
_id = None
|
||||
|
||||
# Temporary variable for 'version' property
|
||||
_missing_version_warned = False
|
||||
|
||||
def __init__(self, manager, settings):
|
||||
self.manager = manager
|
||||
|
||||
|
|
@ -568,6 +575,26 @@ class AYONAddon(object):
|
|||
|
||||
pass
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
"""Addon version.
|
||||
|
||||
Todo:
|
||||
Should be abstract property (required). Introduced in
|
||||
ayon-core 0.3.3 .
|
||||
|
||||
Returns:
|
||||
str: Addon version as semver compatible string.
|
||||
|
||||
"""
|
||||
if not self.__class__._missing_version_warned:
|
||||
self.__class__._missing_version_warned = True
|
||||
print(
|
||||
f"DEV WARNING: Addon '{self.name}' does not have"
|
||||
f" defined version."
|
||||
)
|
||||
return "0.0.0"
|
||||
|
||||
def initialize(self, settings):
|
||||
"""Initialization of addon attributes.
|
||||
|
||||
|
|
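The warning above nudges addon authors to declare a real version. Mirroring what the tool addons later in this changeset do (e.g. the launcher and webserver addons), a subclass simply defines a `version` class attribute, which shadows the base property:

```python
from ayon_core.addon import AYONAddon


class MyAddon(AYONAddon):
    # Hypothetical addon; defining 'version' avoids the DEV WARNING and the
    # "0.0.0" fallback from the base class property.
    name = "my_addon"
    version = "1.0.0"
```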
@ -683,6 +710,30 @@ class OpenPypeAddOn(OpenPypeModule):
|
|||
enabled = True
|
||||
|
||||
|
||||
class _AddonReportInfo:
|
||||
def __init__(
|
||||
self, class_name, name, version, report_value_by_label
|
||||
):
|
||||
self.class_name = class_name
|
||||
self.name = name
|
||||
self.version = version
|
||||
self.report_value_by_label = report_value_by_label
|
||||
|
||||
@classmethod
|
||||
def from_addon(cls, addon, report):
|
||||
class_name = addon.__class__.__name__
|
||||
report_value_by_label = {
|
||||
label: reported.get(class_name)
|
||||
for label, reported in report.items()
|
||||
}
|
||||
return cls(
|
||||
addon.__class__.__name__,
|
||||
addon.name,
|
||||
addon.version,
|
||||
report_value_by_label
|
||||
)
|
||||
|
||||
|
||||
class AddonsManager:
|
||||
"""Manager of addons that helps to load and prepare them to work.
|
||||
|
||||
|
|
@ -859,10 +910,6 @@ class AddonsManager:
|
|||
name_alias = getattr(addon, "openpype_alias", None)
|
||||
if name_alias:
|
||||
aliased_names.append((name_alias, addon))
|
||||
enabled_str = "X"
|
||||
if not addon.enabled:
|
||||
enabled_str = " "
|
||||
self.log.debug("[{}] {}".format(enabled_str, name))
|
||||
|
||||
now = time.time()
|
||||
report[addon.__class__.__name__] = now - prev_start_time
|
||||
|
|
@ -874,6 +921,13 @@ class AddonsManager:
|
|||
exc_info=True
|
||||
)
|
||||
|
||||
for addon_name in sorted(self._addons_by_name.keys()):
|
||||
addon = self._addons_by_name[addon_name]
|
||||
enabled_str = "X" if addon.enabled else " "
|
||||
self.log.debug(
|
||||
f"[{enabled_str}] {addon.name} ({addon.version})"
|
||||
)
|
||||
|
||||
for item in aliased_names:
|
||||
name_alias, addon = item
|
||||
if name_alias not in self._addons_by_name:
|
||||
|
|
@ -1162,39 +1216,55 @@ class AddonsManager:
|
|||
available_col_names |= set(addon_names.keys())
|
||||
|
||||
# Prepare ordered dictionary for columns
|
||||
cols = collections.OrderedDict()
|
||||
# Add addon names to the first column
|
||||
cols["Addon name"] = list(sorted(
|
||||
addon.__class__.__name__
|
||||
addons_info = [
|
||||
_AddonReportInfo.from_addon(addon, self._report)
|
||||
for addon in self.addons
|
||||
if addon.__class__.__name__ in available_col_names
|
||||
))
|
||||
]
|
||||
addons_info.sort(key=lambda x: x.name)
|
||||
|
||||
addon_name_rows = [
|
||||
addon_info.name
|
||||
for addon_info in addons_info
|
||||
]
|
||||
addon_version_rows = [
|
||||
addon_info.version
|
||||
for addon_info in addons_info
|
||||
]
|
||||
|
||||
# Add total key (as last addon)
|
||||
cols["Addon name"].append(self._report_total_key)
|
||||
addon_name_rows.append(self._report_total_key)
|
||||
addon_version_rows.append(f"({len(addons_info)})")
|
||||
|
||||
cols = collections.OrderedDict()
|
||||
# Add addon names to the first column
|
||||
cols["Addon name"] = addon_name_rows
|
||||
cols["Version"] = addon_version_rows
|
||||
|
||||
# Add columns from report
|
||||
total_by_addon = {
|
||||
row: 0
|
||||
for row in addon_name_rows
|
||||
}
|
||||
for label in self._report.keys():
|
||||
cols[label] = []
|
||||
|
||||
total_addon_times = {}
|
||||
for addon_name in cols["Addon name"]:
|
||||
total_addon_times[addon_name] = 0
|
||||
|
||||
for label, reported in self._report.items():
|
||||
for addon_name in cols["Addon name"]:
|
||||
col_time = reported.get(addon_name)
|
||||
if col_time is None:
|
||||
cols[label].append("N/A")
|
||||
rows = []
|
||||
col_total = 0
|
||||
for addon_info in addons_info:
|
||||
value = addon_info.report_value_by_label.get(label)
|
||||
if value is None:
|
||||
rows.append("N/A")
|
||||
continue
|
||||
cols[label].append("{:.3f}".format(col_time))
|
||||
total_addon_times[addon_name] += col_time
|
||||
|
||||
rows.append("{:.3f}".format(value))
|
||||
total_by_addon[addon_info.name] += value
|
||||
col_total += value
|
||||
total_by_addon[self._report_total_key] += col_total
|
||||
rows.append("{:.3f}".format(col_total))
|
||||
cols[label] = rows
|
||||
# Also add a total column that sums the row
|
||||
cols[self._report_total_key] = []
|
||||
for addon_name in cols["Addon name"]:
|
||||
cols[self._report_total_key].append(
|
||||
"{:.3f}".format(total_addon_times[addon_name])
|
||||
)
|
||||
cols[self._report_total_key] = [
|
||||
"{:.3f}".format(total_by_addon[addon_name])
|
||||
for addon_name in cols["Addon name"]
|
||||
]
|
||||
|
||||
# Prepare column widths and total row count
|
||||
# - column width is by
|
||||
|
|
@@ -1321,7 +1391,7 @@ class TrayAddonsManager(AddonsManager):
self.doubleclick_callback = None

def add_doubleclick_callback(self, addon, callback):
-"""Register doubleclick callbacks on tray icon.
+"""Register double-click callbacks on tray icon.

Currently, there is no way to determine which one is launched. The name of
the callback can be defined with the `doubleclick_callback` attribute.
@ -268,7 +268,7 @@ def main(*args, **kwargs):
|
|||
os.path.join(AYON_CORE_ROOT, "tools"),
|
||||
# add common AYON vendor
|
||||
# (common for multiple Python interpreter versions)
|
||||
os.path.join(AYON_CORE_ROOT, "vendor", "python", "common")
|
||||
os.path.join(AYON_CORE_ROOT, "vendor", "python")
|
||||
]
|
||||
for path in additional_paths:
|
||||
if path not in split_paths:
|
||||
|
|
|
|||
|
|
@ -1,6 +0,0 @@
|
|||
from .addon import BlenderAddon
|
||||
|
||||
|
||||
__all__ = (
|
||||
"BlenderAddon",
|
||||
)
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
from .addon import (
|
||||
HOST_DIR,
|
||||
FlameAddon,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"HOST_DIR",
|
||||
"FlameAddon",
|
||||
)
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
from .addon import (
|
||||
HIERO_ROOT_DIR,
|
||||
HieroAddon,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"HIERO_ROOT_DIR",
|
||||
"HieroAddon",
|
||||
)
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
"""Collector for pointcache types.
|
||||
|
||||
This will add an additional family to the pointcache instance based on
the creator_identifier parameter.
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectPointcacheType(pyblish.api.InstancePlugin):
|
||||
"""Collect data type for pointcache instance."""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["houdini"]
|
||||
families = ["pointcache"]
|
||||
label = "Collect type of pointcache"
|
||||
|
||||
def process(self, instance):
|
||||
if instance.data["creator_identifier"] == "io.openpype.creators.houdini.bgeo": # noqa: E501
|
||||
instance.data["families"] += ["bgeo"]
|
||||
elif instance.data["creator_identifier"] == "io.openpype.creators.houdini.pointcache": # noqa: E501
|
||||
instance.data["families"] += ["abc"]
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
from .addon import ResolveAddon
|
||||
|
||||
|
||||
__all__ = (
|
||||
"ResolveAddon",
|
||||
)
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
from .addon import UnrealAddon
|
||||
|
||||
|
||||
__all__ = (
|
||||
"UnrealAddon",
|
||||
)
|
||||
|
|
@ -1,19 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# flake8: noqa E402
|
||||
"""AYON lib functions."""
|
||||
# add vendor to sys path based on Python version
|
||||
import sys
|
||||
import os
|
||||
import site
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
|
||||
# Add Python version specific vendor folder
|
||||
python_version_dir = os.path.join(
|
||||
AYON_CORE_ROOT, "vendor", "python", "python_{}".format(sys.version[0])
|
||||
)
|
||||
# Prepend path in sys paths
|
||||
sys.path.insert(0, python_version_dir)
|
||||
site.addsitedir(python_version_dir)
|
||||
|
||||
from .local_settings import (
|
||||
IniSettingRegistry,
|
||||
|
|
|
|||
|
|
@ -1,5 +0,0 @@
|
|||
from .clockify_module import ClockifyModule
|
||||
|
||||
__all__ = (
|
||||
"ClockifyModule",
|
||||
)
|
||||
|
|
@ -1 +0,0 @@
|
|||
__version__ = "0.1.10"
|
||||
|
|
@ -7,6 +7,7 @@ from ayon_core.addon import AYONAddon, ITrayAction
|
|||
class LauncherAction(AYONAddon, ITrayAction):
|
||||
label = "Launcher"
|
||||
name = "launcher_tool"
|
||||
version = "1.0.0"
|
||||
|
||||
def initialize(self, settings):
|
||||
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ from ayon_core.addon import AYONAddon, ITrayAddon
|
|||
|
||||
class LoaderAddon(AYONAddon, ITrayAddon):
|
||||
name = "loader_tool"
|
||||
version = "1.0.0"
|
||||
|
||||
def initialize(self, settings):
|
||||
# Tray attributes
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ from ayon_core.addon import AYONAddon, ITrayAction
|
|||
class PythonInterpreterAction(AYONAddon, ITrayAction):
|
||||
label = "Console"
|
||||
name = "python_interpreter"
|
||||
version = "1.0.0"
|
||||
admin_action = True
|
||||
|
||||
def initialize(self, settings):
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
from .version import __version__
|
||||
from .structures import HostMsgAction
|
||||
from .webserver_module import (
|
||||
WebServerAddon
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"__version__",
|
||||
|
||||
"HostMsgAction",
|
||||
"WebServerAddon",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -9,22 +9,18 @@ from qtpy import QtWidgets
|
|||
from ayon_core.addon import ITrayService
|
||||
from ayon_core.tools.stdout_broker.window import ConsoleDialog
|
||||
|
||||
from .structures import HostMsgAction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Host listener icon type
|
||||
class IconType:
|
||||
IDLE = "idle"
|
||||
RUNNING = "running"
|
||||
FAILED = "failed"
|
||||
|
||||
|
||||
class MsgAction:
|
||||
CONNECTING = "connecting"
|
||||
INITIALIZED = "initialized"
|
||||
ADD = "add"
|
||||
CLOSE = "close"
|
||||
|
||||
|
||||
class HostListener:
|
||||
def __init__(self, webserver, module):
|
||||
self._window_per_id = {}
|
||||
|
|
@ -96,22 +92,22 @@ class HostListener:
|
|||
if msg.type == aiohttp.WSMsgType.TEXT:
|
||||
host_name, action, text = self._parse_message(msg)
|
||||
|
||||
if action == MsgAction.CONNECTING:
|
||||
if action == HostMsgAction.CONNECTING:
|
||||
self._action_per_id[host_name] = None
|
||||
# must be sent to the main thread, or the action won't trigger
|
||||
self.module.execute_in_main_thread(
|
||||
lambda: self._host_is_connecting(host_name, text))
|
||||
elif action == MsgAction.CLOSE:
|
||||
elif action == HostMsgAction.CLOSE:
|
||||
# clean close
|
||||
self._close(host_name)
|
||||
await ws.close()
|
||||
elif action == MsgAction.INITIALIZED:
|
||||
elif action == HostMsgAction.INITIALIZED:
|
||||
self.module.execute_in_main_thread(
|
||||
# must be queued as _host_is_connecting might not
|
||||
# be triggered/finished yet
|
||||
lambda: self._set_host_icon(host_name,
|
||||
IconType.RUNNING))
|
||||
elif action == MsgAction.ADD:
|
||||
elif action == HostMsgAction.ADD:
|
||||
self.module.execute_in_main_thread(
|
||||
lambda: self._add_text(host_name, text))
|
||||
elif msg.type == aiohttp.WSMsgType.ERROR:
|
||||
|
|
client/ayon_core/modules/webserver/structures.py (new file, 6 lines)

@@ -0,0 +1,6 @@
# Host listener message actions
|
||||
class HostMsgAction:
|
||||
CONNECTING = "connecting"
|
||||
INITIALIZED = "initialized"
|
||||
ADD = "add"
|
||||
CLOSE = "close"
|
||||
client/ayon_core/modules/webserver/version.py (new file, 1 line)

@@ -0,0 +1 @@
__version__ = "1.0.0"
|
||||
|
|
@ -26,9 +26,12 @@ import socket
|
|||
from ayon_core import resources
|
||||
from ayon_core.addon import AYONAddon, ITrayService
|
||||
|
||||
from .version import __version__
|
||||
|
||||
|
||||
class WebServerAddon(AYONAddon, ITrayService):
|
||||
name = "webserver"
|
||||
version = __version__
|
||||
label = "WebServer"
|
||||
|
||||
webserver_url_env = "AYON_WEBSERVER_URL"
|
||||
|
|
|
|||
|
|
@ -11,7 +11,12 @@ from pyblish.lib import MessageHandler
|
|||
|
||||
from ayon_core import AYON_CORE_ROOT
|
||||
from ayon_core.host import HostBase
|
||||
from ayon_core.lib import is_in_tests, initialize_ayon_connection, emit_event
|
||||
from ayon_core.lib import (
|
||||
is_in_tests,
|
||||
initialize_ayon_connection,
|
||||
emit_event,
|
||||
version_up
|
||||
)
|
||||
from ayon_core.addon import load_addons, AddonsManager
|
||||
from ayon_core.settings import get_project_settings
|
||||
|
||||
|
|
@ -21,6 +26,8 @@ from .template_data import get_template_data_with_names
|
|||
from .workfile import (
|
||||
get_workdir,
|
||||
get_custom_workfile_template_by_string_context,
|
||||
get_workfile_template_key_from_context,
|
||||
get_last_workfile
|
||||
)
|
||||
from . import (
|
||||
register_loader_plugin_path,
|
||||
|
|
@ -579,3 +586,48 @@ def get_process_id():
|
|||
if _process_id is None:
|
||||
_process_id = str(uuid.uuid4())
|
||||
return _process_id
|
||||
|
||||
|
||||
def version_up_current_workfile():
|
||||
"""Function to increment and save workfile
|
||||
"""
|
||||
host = registered_host()
|
||||
if not host.has_unsaved_changes():
|
||||
print("No unsaved changes, skipping file save..")
|
||||
return
|
||||
|
||||
project_name = get_current_project_name()
|
||||
folder_path = get_current_folder_path()
|
||||
task_name = get_current_task_name()
|
||||
host_name = get_current_host_name()
|
||||
|
||||
template_key = get_workfile_template_key_from_context(
|
||||
project_name,
|
||||
folder_path,
|
||||
task_name,
|
||||
host_name,
|
||||
)
|
||||
anatomy = Anatomy(project_name)
|
||||
|
||||
data = get_template_data_with_names(
|
||||
project_name, folder_path, task_name, host_name
|
||||
)
|
||||
data["root"] = anatomy.roots
|
||||
|
||||
work_template = anatomy.get_template_item("work", template_key)
|
||||
|
||||
# Define saving file extension
|
||||
extensions = host.get_workfile_extensions()
|
||||
current_file = host.get_current_workfile()
|
||||
if current_file:
|
||||
extensions = [os.path.splitext(current_file)[-1]]
|
||||
|
||||
work_root = work_template["directory"].format_strict(data)
|
||||
file_template = work_template["file"].template
|
||||
last_workfile_path = get_last_workfile(
|
||||
work_root, file_template, data, extensions, True
|
||||
)
|
||||
new_workfile_path = version_up(last_workfile_path)
|
||||
if os.path.exists(new_workfile_path):
|
||||
new_workfile_path = version_up(new_workfile_path)
|
||||
host.save_workfile(new_workfile_path)
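A possible call site, e.g. a host integration binding the new helper to a "version up" menu entry; the wiring is an assumption, only the helper itself is defined above:

```python
def _on_version_up_requested():
    # Saves the current workfile under the next free version number,
    # or skips saving when the host reports no unsaved changes.
    version_up_current_workfile()
```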
@ -681,7 +681,7 @@ class PublishAttributeValues(AttributeValues):
|
|||
|
||||
@property
|
||||
def parent(self):
|
||||
self.publish_attributes.parent
|
||||
return self.publish_attributes.parent
|
||||
|
||||
|
||||
class PublishAttributes:
|
||||
|
|
|
|||
|
|
@ -336,17 +336,16 @@ def get_plugin_settings(plugin, project_settings, log, category=None):
|
|||
settings_category = getattr(plugin, "settings_category", None)
|
||||
if settings_category:
|
||||
try:
|
||||
return (
|
||||
project_settings
|
||||
[settings_category]
|
||||
["publish"]
|
||||
[plugin.__name__]
|
||||
)
|
||||
category_settings = project_settings[settings_category]
|
||||
except KeyError:
|
||||
log.warning((
|
||||
"Couldn't find plugin '{}' settings"
|
||||
" under settings category '{}'"
|
||||
).format(plugin.__name__, settings_category))
|
||||
"Couldn't find settings category '{}' in project settings"
|
||||
).format(settings_category))
|
||||
return {}
|
||||
|
||||
try:
|
||||
return category_settings["publish"][plugin.__name__]
|
||||
except KeyError:
|
||||
return {}
|
||||
|
||||
# Use project settings based on a category name
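For reference, the lookup above keys off a `settings_category` attribute on the plugin class; a hypothetical publish plugin opting in could look like this (the category name is an assumption):

```python
import pyblish.api


class CollectExample(pyblish.api.InstancePlugin):
    label = "Collect Example"
    order = pyblish.api.CollectorOrder

    # get_plugin_settings() will then look for
    # project_settings["example_addon"]["publish"]["CollectExample"]
    settings_category = "example_addon"

    def process(self, instance):
        pass
```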
|
||||
|
|
|
|||
|
|
@ -1,8 +1,14 @@
|
|||
import pyblish.api
|
||||
from ayon_core.pipeline.publish import PublishValidationError
|
||||
|
||||
from ayon_core.lib import filter_profiles
|
||||
from ayon_core.pipeline.publish import (
|
||||
PublishValidationError,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
from ayon_core.pipeline import get_current_host_name
|
||||
|
||||
|
||||
class ValidateVersion(pyblish.api.InstancePlugin):
|
||||
class ValidateVersion(pyblish.api.InstancePlugin, OptionalPyblishPluginMixin):
|
||||
"""Validate instance version.
|
||||
|
||||
AYON does not allow overwriting previously published versions.
|
||||
|
|
@ -11,13 +17,39 @@ class ValidateVersion(pyblish.api.InstancePlugin):
|
|||
order = pyblish.api.ValidatorOrder
|
||||
|
||||
label = "Validate Version"
|
||||
hosts = ["nuke", "maya", "houdini", "blender",
|
||||
"photoshop", "aftereffects"]
|
||||
|
||||
optional = False
|
||||
active = True
|
||||
|
||||
@classmethod
|
||||
def apply_settings(cls, settings):
|
||||
# Disable if no profile is found for the current host
|
||||
profiles = (
|
||||
settings
|
||||
["core"]
|
||||
["publish"]
|
||||
["ValidateVersion"]
|
||||
["plugin_state_profiles"]
|
||||
)
|
||||
profile = filter_profiles(
|
||||
profiles, {"host_names": get_current_host_name()}
|
||||
)
|
||||
if not profile:
|
||||
cls.enabled = False
|
||||
return
|
||||
|
||||
# Apply settings from profile
|
||||
for attr_name in {
|
||||
"enabled",
|
||||
"optional",
|
||||
"active",
|
||||
}:
|
||||
setattr(cls, attr_name, profile[attr_name])
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
version = instance.data.get("version")
|
||||
latest_version = instance.data.get("latestVersion")
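The `apply_settings` above implies a settings shape roughly like the following; the key names come from the attributes it applies and from the `host_names` filter, while the concrete values are illustrative:

```python
# Illustrative value of
# settings["core"]["publish"]["ValidateVersion"]["plugin_state_profiles"]
plugin_state_profiles = [
    {
        # Profile is picked when the current host matches one of these names.
        "host_names": ["maya", "nuke", "houdini"],
        # Attributes copied onto the plugin class when the profile matches.
        "enabled": True,
        "optional": True,
        "active": True,
    },
]
```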
@@ -1,12 +0,0 @@
Adobe webserver
---------------
Aiohttp (asyncio) based websocket server used for communication with host
applications, currently only for Adobe (but it could be used for any
non-Python DCC that has a websocket client).

This webserver is started in a spawned Python process that opens the DCC
during its launch, waits for a connection from the DCC and handles the
communication going forward. The server is closed before the Python
process is killed.

(Different from `ayon_core/modules/webserver`: that one runs in the Tray,
this one runs in the spawned Python process.)
@ -14,6 +14,7 @@ from .hierarchy import (
|
|||
)
|
||||
from .thumbnails import ThumbnailsModel
|
||||
from .selection import HierarchyExpectedSelection
|
||||
from .users import UsersModel
|
||||
|
||||
|
||||
__all__ = (
|
||||
|
|
@ -32,4 +33,6 @@ __all__ = (
|
|||
"ThumbnailsModel",
|
||||
|
||||
"HierarchyExpectedSelection",
|
||||
|
||||
"UsersModel",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import ayon_api
|
|||
import six
|
||||
|
||||
from ayon_core.style import get_default_entity_icon_color
|
||||
from ayon_core.lib import CacheItem
|
||||
from ayon_core.lib import CacheItem, NestedCacheItem
|
||||
|
||||
PROJECTS_MODEL_SENDER = "projects.model"
|
||||
|
||||
|
|
@ -17,6 +17,49 @@ class AbstractHierarchyController:
|
|||
pass
|
||||
|
||||
|
||||
class StatusItem:
|
||||
"""Item representing status of project.
|
||||
|
||||
Args:
|
||||
name (str): Status name ("Not ready").
|
||||
color (str): Status color in hex ("#434a56").
|
||||
short (str): Short status name ("NRD").
|
||||
icon (str): Icon name in MaterialIcons ("fiber_new").
|
||||
state (Literal["not_started", "in_progress", "done", "blocked"]):
|
||||
Status state.
|
||||
|
||||
"""
|
||||
def __init__(self, name, color, short, icon, state):
|
||||
self.name = name
|
||||
self.color = color
|
||||
self.short = short
|
||||
self.icon = icon
|
||||
self.state = state
|
||||
|
||||
def to_data(self):
|
||||
return {
|
||||
"name": self.name,
|
||||
"color": self.color,
|
||||
"short": self.short,
|
||||
"icon": self.icon,
|
||||
"state": self.state,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_data(cls, data):
|
||||
return cls(**data)
|
||||
|
||||
@classmethod
|
||||
def from_project_item(cls, status_data):
|
||||
return cls(
|
||||
name=status_data["name"],
|
||||
color=status_data["color"],
|
||||
short=status_data["shortName"],
|
||||
icon=status_data["icon"],
|
||||
state=status_data["state"],
|
||||
)
|
||||
|
||||
|
||||
class ProjectItem:
|
||||
"""Item representing folder entity on a server.
|
||||
|
||||
|
|
@ -40,6 +83,23 @@ class ProjectItem:
|
|||
}
|
||||
self.icon = icon
|
||||
|
||||
@classmethod
|
||||
def from_entity(cls, project_entity):
|
||||
"""Creates folder item from entity.
|
||||
|
||||
Args:
|
||||
project_entity (dict[str, Any]): Project entity.
|
||||
|
||||
Returns:
|
||||
ProjectItem: Project item.
|
||||
|
||||
"""
|
||||
return cls(
|
||||
project_entity["name"],
|
||||
project_entity["active"],
|
||||
project_entity["library"],
|
||||
)
|
||||
|
||||
def to_data(self):
|
||||
"""Converts folder item to data.
|
||||
|
||||
|
|
@ -79,7 +139,7 @@ def _get_project_items_from_entitiy(projects):
|
|||
"""
|
||||
|
||||
return [
|
||||
ProjectItem(project["name"], project["active"], project["library"])
|
||||
ProjectItem.from_entity(project)
|
||||
for project in projects
|
||||
]
|
||||
|
||||
|
|
@ -87,18 +147,29 @@ def _get_project_items_from_entitiy(projects):
|
|||
class ProjectsModel(object):
|
||||
def __init__(self, controller):
|
||||
self._projects_cache = CacheItem(default_factory=list)
|
||||
self._project_items_by_name = {}
|
||||
self._projects_by_name = {}
|
||||
self._project_statuses_cache = NestedCacheItem(
|
||||
levels=1, default_factory=list
|
||||
)
|
||||
self._projects_by_name = NestedCacheItem(
|
||||
levels=1, default_factory=list
|
||||
)
|
||||
|
||||
self._is_refreshing = False
|
||||
self._controller = controller
|
||||
|
||||
def reset(self):
|
||||
self._projects_cache.reset()
|
||||
self._project_items_by_name = {}
|
||||
self._projects_by_name = {}
|
||||
self._project_statuses_cache.reset()
|
||||
self._projects_by_name.reset()
|
||||
|
||||
def refresh(self):
|
||||
"""Refresh project items.
|
||||
|
||||
This method re-queries the list of ProjectItem objects returned by
'get_project_items'.
|
||||
|
||||
To reset all cached items use 'reset' method.
|
||||
"""
|
||||
self._refresh_projects_cache()
|
||||
|
||||
def get_project_items(self, sender):
|
||||
|
|
@ -117,12 +188,51 @@ class ProjectsModel(object):
|
|||
return self._projects_cache.get_data()
|
||||
|
||||
def get_project_entity(self, project_name):
|
||||
if project_name not in self._projects_by_name:
|
||||
"""Get project entity.
|
||||
|
||||
Args:
|
||||
project_name (str): Project name.
|
||||
|
||||
Returns:
|
||||
Union[dict[str, Any], None]: Project entity or None if project
|
||||
was not found by name.
|
||||
|
||||
"""
|
||||
project_cache = self._projects_by_name[project_name]
|
||||
if not project_cache.is_valid:
|
||||
entity = None
|
||||
if project_name:
|
||||
entity = ayon_api.get_project(project_name)
|
||||
self._projects_by_name[project_name] = entity
|
||||
return self._projects_by_name[project_name]
|
||||
project_cache.update_data(entity)
|
||||
return project_cache.get_data()
|
||||
|
||||
def get_project_status_items(self, project_name, sender):
|
||||
"""Get project status items.
|
||||
|
||||
Args:
|
||||
project_name (str): Project name.
|
||||
sender (Union[str, None]): Name of sender who asked for items.
|
||||
|
||||
Returns:
|
||||
list[StatusItem]: Status items for project.
|
||||
|
||||
"""
|
||||
statuses_cache = self._project_statuses_cache[project_name]
|
||||
if not statuses_cache.is_valid:
|
||||
with self._project_statuses_refresh_event_manager(
|
||||
sender, project_name
|
||||
):
|
||||
project_entity = None
|
||||
if project_name:
|
||||
project_entity = self.get_project_entity(project_name)
|
||||
statuses = []
|
||||
if project_entity:
|
||||
statuses = [
|
||||
StatusItem.from_project_item(status)
|
||||
for status in project_entity["statuses"]
|
||||
]
|
||||
statuses_cache.update_data(statuses)
|
||||
return statuses_cache.get_data()
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _project_refresh_event_manager(self, sender):
|
||||
|
|
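The caching change above replaces plain dicts with `NestedCacheItem`. Going only by how it is used in this changeset (indexing by project name, `is_valid`, `update_data`, `get_data`, `reset`), the access pattern is roughly this sketch:

```python
from ayon_core.lib import NestedCacheItem

# One nesting level keyed by project name, empty list until filled.
statuses_cache = NestedCacheItem(levels=1, default_factory=list)

project_cache = statuses_cache["my_project"]   # hypothetical project name
if not project_cache.is_valid:
    # Query fresh data only when the cached value is missing or expired.
    project_cache.update_data(["Not ready", "In progress", "Done"])
statuses = project_cache.get_data()

statuses_cache.reset()   # invalidate all cached projects at once
```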
@ -143,6 +253,23 @@ class ProjectsModel(object):
|
|||
)
|
||||
self._is_refreshing = False
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _project_statuses_refresh_event_manager(self, sender, project_name):
|
||||
self._controller.emit_event(
|
||||
"projects.statuses.refresh.started",
|
||||
{"sender": sender, "project_name": project_name},
|
||||
PROJECTS_MODEL_SENDER
|
||||
)
|
||||
try:
|
||||
yield
|
||||
|
||||
finally:
|
||||
self._controller.emit_event(
|
||||
"projects.statuses.refresh.finished",
|
||||
{"sender": sender, "project_name": project_name},
|
||||
PROJECTS_MODEL_SENDER
|
||||
)
|
||||
|
||||
def _refresh_projects_cache(self, sender=None):
|
||||
if self._is_refreshing:
|
||||
return None
|
||||
|
|
|
|||
client/ayon_core/tools/common_models/users.py (new file, 84 lines)

@@ -0,0 +1,84 @@
import ayon_api
|
||||
|
||||
from ayon_core.lib import CacheItem
|
||||
|
||||
|
||||
class UserItem:
|
||||
def __init__(
|
||||
self,
|
||||
username,
|
||||
full_name,
|
||||
email,
|
||||
avatar_url,
|
||||
active,
|
||||
):
|
||||
self.username = username
|
||||
self.full_name = full_name
|
||||
self.email = email
|
||||
self.avatar_url = avatar_url
|
||||
self.active = active
|
||||
|
||||
@classmethod
|
||||
def from_entity_data(cls, user_data):
|
||||
return cls(
|
||||
user_data["name"],
|
||||
user_data["attrib"]["fullName"],
|
||||
user_data["attrib"]["email"],
|
||||
user_data["attrib"]["avatarUrl"],
|
||||
user_data["active"],
|
||||
)
|
||||
|
||||
|
||||
class UsersModel:
|
||||
def __init__(self, controller):
|
||||
self._controller = controller
|
||||
self._users_cache = CacheItem(default_factory=list)
|
||||
|
||||
def get_user_items(self):
|
||||
"""Get user items.
|
||||
|
||||
Returns:
|
||||
List[UserItem]: List of user items.
|
||||
|
||||
"""
|
||||
self._invalidate_cache()
|
||||
return self._users_cache.get_data()
|
||||
|
||||
def get_user_items_by_name(self):
|
||||
"""Get user items by name.
|
||||
|
||||
Implemented because most use cases of this model need to look up
user information by username.
|
||||
|
||||
Returns:
|
||||
Dict[str, UserItem]: Dictionary of user items by name.
|
||||
|
||||
"""
|
||||
return {
|
||||
user_item.username: user_item
|
||||
for user_item in self.get_user_items()
|
||||
}
|
||||
|
||||
def get_user_item_by_username(self, username):
|
||||
"""Get user item by username.
|
||||
|
||||
Args:
|
||||
username (str): Username.
|
||||
|
||||
Returns:
|
||||
Union[UserItem, None]: User item or None if not found.
|
||||
|
||||
"""
|
||||
self._invalidate_cache()
|
||||
for user_item in self.get_user_items():
|
||||
if user_item.username == username:
|
||||
return user_item
|
||||
return None
|
||||
|
||||
def _invalidate_cache(self):
|
||||
if self._users_cache.is_valid:
|
||||
return
|
||||
self._users_cache.update_data([
|
||||
UserItem.from_entity_data(user)
|
||||
for user in ayon_api.get_users()
|
||||
])
|
||||
|
|
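A short usage sketch for the new `UsersModel` above: the controller argument is not used by the methods shown, so `None` stands in, and the username is made up. `ayon_api.get_users()` is queried lazily on first access.

```python
# Illustrative only; requires a configured ayon_api connection.
users_model = UsersModel(controller=None)

users_by_name = users_model.get_user_items_by_name()
user = users_model.get_user_item_by_username("john.doe")  # hypothetical username
if user is not None:
    print(user.full_name, user.email, user.active)
```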
@ -290,6 +290,34 @@ class ActionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
painter.drawPixmap(extender_x, extender_y, pix)
|
||||
|
||||
|
||||
class ActionsProxyModel(QtCore.QSortFilterProxyModel):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
|
||||
def lessThan(self, left, right):
|
||||
# Sort by action order and then by label
|
||||
left_value = left.data(ACTION_SORT_ROLE)
|
||||
right_value = right.data(ACTION_SORT_ROLE)
|
||||
|
||||
# Values are the same -> fall back to the default sorting
|
||||
if left_value == right_value:
|
||||
# Default behavior is using DisplayRole
|
||||
return super().lessThan(left, right)
|
||||
|
||||
# Validate 'None' values
|
||||
if right_value is None:
|
||||
return True
|
||||
if left_value is None:
|
||||
return False
|
||||
# Sort values and handle incompatible types
|
||||
try:
|
||||
return left_value < right_value
|
||||
except TypeError:
|
||||
return True
|
||||
|
||||
|
||||
class ActionsWidget(QtWidgets.QWidget):
|
||||
def __init__(self, controller, parent):
|
||||
super(ActionsWidget, self).__init__(parent)
|
||||
|
|
@ -316,10 +344,7 @@ class ActionsWidget(QtWidgets.QWidget):
|
|||
|
||||
model = ActionsQtModel(controller)
|
||||
|
||||
proxy_model = QtCore.QSortFilterProxyModel()
|
||||
proxy_model.setSortCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
proxy_model.setSortRole(ACTION_SORT_ROLE)
|
||||
|
||||
proxy_model = ActionsProxyModel()
|
||||
proxy_model.setSourceModel(model)
|
||||
view.setModel(proxy_model)
|
||||
|
||||
|
|
|
|||
|
|
@ -114,6 +114,7 @@ class VersionItem:
|
|||
thumbnail_id (Union[str, None]): Thumbnail id.
|
||||
published_time (Union[str, None]): Published time in format
|
||||
'%Y%m%dT%H%M%SZ'.
|
||||
status (Union[str, None]): Status name.
|
||||
author (Union[str, None]): Author.
|
||||
frame_range (Union[str, None]): Frame range.
|
||||
duration (Union[int, None]): Duration.
|
||||
|
|
@ -132,6 +133,7 @@ class VersionItem:
|
|||
thumbnail_id,
|
||||
published_time,
|
||||
author,
|
||||
status,
|
||||
frame_range,
|
||||
duration,
|
||||
handles,
|
||||
|
|
@ -146,6 +148,7 @@ class VersionItem:
|
|||
self.is_hero = is_hero
|
||||
self.published_time = published_time
|
||||
self.author = author
|
||||
self.status = status
|
||||
self.frame_range = frame_range
|
||||
self.duration = duration
|
||||
self.handles = handles
|
||||
|
|
@ -169,12 +172,30 @@ class VersionItem:
|
|||
def __gt__(self, other):
|
||||
if not isinstance(other, VersionItem):
|
||||
return False
|
||||
if (
|
||||
other.version == self.version
|
||||
and self.is_hero
|
||||
):
|
||||
# Make sure hero versions are positive
|
||||
version = abs(self.version)
|
||||
other_version = abs(other.version)
|
||||
# Hero version is greater than non-hero
|
||||
if version == other_version:
|
||||
return self.is_hero
|
||||
return version > other_version
|
||||
|
||||
def __lt__(self, other):
|
||||
if not isinstance(other, VersionItem):
|
||||
return True
|
||||
return other.version < self.version
|
||||
# Make sure hero versions are positive
|
||||
version = abs(self.version)
|
||||
other_version = abs(other.version)
|
||||
# Non-hero version is lesser than hero
|
||||
if version == other_version:
|
||||
return not self.is_hero
|
||||
return version < other_version
|
||||
|
||||
def __ge__(self, other):
|
||||
return self.__eq__(other) or self.__gt__(other)
|
||||
|
||||
def __le__(self, other):
|
||||
return self.__eq__(other) or self.__lt__(other)
|
||||
|
||||
def to_data(self):
|
||||
return {
|
||||
|
|
@ -185,6 +206,7 @@ class VersionItem:
|
|||
"is_hero": self.is_hero,
|
||||
"published_time": self.published_time,
|
||||
"author": self.author,
|
||||
"status": self.status,
|
||||
"frame_range": self.frame_range,
|
||||
"duration": self.duration,
|
||||
"handles": self.handles,
|
||||
|
|
@ -488,6 +510,27 @@ class FrontendLoaderController(_BaseLoaderController):
|
|||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_project_status_items(self, project_name, sender=None):
|
||||
"""Items for all projects available on server.
|
||||
|
||||
Triggers event topics "projects.statuses.refresh.started" and
|
||||
"projects.statuses.refresh.finished" with data:
|
||||
{
|
||||
"sender": sender,
|
||||
"project_name": project_name
|
||||
}
|
||||
|
||||
Args:
|
||||
project_name (Union[str, None]): Project name.
|
||||
sender (Optional[str]): Sender who requested the items.
|
||||
|
||||
Returns:
|
||||
list[StatusItem]: List of status items.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_product_items(self, project_name, folder_ids, sender=None):
|
||||
"""Product items for folder ids.
|
||||
|
|
|
|||
|
|
@ -180,6 +180,11 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
|||
def get_project_items(self, sender=None):
|
||||
return self._projects_model.get_project_items(sender)
|
||||
|
||||
def get_project_status_items(self, project_name, sender=None):
|
||||
return self._projects_model.get_project_status_items(
|
||||
project_name, sender
|
||||
)
|
||||
|
||||
def get_folder_items(self, project_name, sender=None):
|
||||
return self._hierarchy_model.get_folder_items(project_name, sender)
|
||||
|
||||
|
|
@ -343,10 +348,18 @@ class LoaderController(BackendLoaderController, FrontendLoaderController):
|
|||
return set()
|
||||
|
||||
if not self._loaded_products_cache.is_valid:
|
||||
if isinstance(self._host, ILoadHost):
|
||||
containers = self._host.get_containers()
|
||||
else:
|
||||
containers = self._host.ls()
|
||||
try:
|
||||
if isinstance(self._host, ILoadHost):
|
||||
containers = self._host.get_containers()
|
||||
else:
|
||||
containers = self._host.ls()
|
||||
|
||||
except BaseException:
|
||||
self.log.error(
|
||||
"Failed to collect loaded products.", exc_info=True
|
||||
)
|
||||
containers = []
|
||||
|
||||
repre_ids = set()
|
||||
for container in containers:
|
||||
repre_id = container.get("representation")
|
||||
|
|
|
|||
|
|
@ -58,6 +58,7 @@ def version_item_from_entity(version):
|
|||
thumbnail_id=version["thumbnailId"],
|
||||
published_time=published_time,
|
||||
author=author,
|
||||
status=version["status"],
|
||||
frame_range=frame_range,
|
||||
duration=duration,
|
||||
handles=handles,
|
||||
|
|
@ -526,8 +527,11 @@ class ProductsModel:
|
|||
products = list(ayon_api.get_products(project_name, **kwargs))
|
||||
product_ids = {product["id"] for product in products}
|
||||
|
||||
# Add 'status' to fields -> fixed in ayon-python-api 1.0.4
|
||||
fields = ayon_api.get_default_fields_for_type("version")
|
||||
fields.add("status")
|
||||
versions = ayon_api.get_versions(
|
||||
project_name, product_ids=product_ids
|
||||
project_name, product_ids=product_ids, fields=fields
|
||||
)
|
||||
|
||||
return self._create_product_items(
|
||||
|
|
|
|||
|
|
@ -321,6 +321,8 @@ class LoaderFoldersWidget(QtWidgets.QWidget):
|
|||
"""
|
||||
|
||||
self._folders_proxy_model.setFilterFixedString(name)
|
||||
if name:
|
||||
self._folders_view.expandAll()
|
||||
|
||||
def set_merged_products_selection(self, items):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -104,7 +104,10 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
style = QtWidgets.QApplication.style()
|
||||
|
||||
style.drawControl(
|
||||
style.CE_ItemViewItem, option, painter, option.widget
|
||||
QtWidgets.QCommonStyle.CE_ItemViewItem,
|
||||
option,
|
||||
painter,
|
||||
option.widget
|
||||
)
|
||||
|
||||
painter.save()
|
||||
|
|
@ -116,9 +119,14 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
pen.setColor(fg_color)
|
||||
painter.setPen(pen)
|
||||
|
||||
text_rect = style.subElementRect(style.SE_ItemViewItemText, option)
|
||||
text_rect = style.subElementRect(
|
||||
QtWidgets.QCommonStyle.SE_ItemViewItemText,
|
||||
option
|
||||
)
|
||||
text_margin = style.proxy().pixelMetric(
|
||||
style.PM_FocusFrameHMargin, option, option.widget
|
||||
QtWidgets.QCommonStyle.PM_FocusFrameHMargin,
|
||||
option,
|
||||
option.widget
|
||||
) + 1
|
||||
|
||||
painter.drawText(
|
||||
|
|
|
|||
|
|
@ -22,18 +22,22 @@ VERSION_HERO_ROLE = QtCore.Qt.UserRole + 11
|
|||
VERSION_NAME_ROLE = QtCore.Qt.UserRole + 12
|
||||
VERSION_NAME_EDIT_ROLE = QtCore.Qt.UserRole + 13
|
||||
VERSION_PUBLISH_TIME_ROLE = QtCore.Qt.UserRole + 14
|
||||
VERSION_AUTHOR_ROLE = QtCore.Qt.UserRole + 15
|
||||
VERSION_FRAME_RANGE_ROLE = QtCore.Qt.UserRole + 16
|
||||
VERSION_DURATION_ROLE = QtCore.Qt.UserRole + 17
|
||||
VERSION_HANDLES_ROLE = QtCore.Qt.UserRole + 18
|
||||
VERSION_STEP_ROLE = QtCore.Qt.UserRole + 19
|
||||
VERSION_AVAILABLE_ROLE = QtCore.Qt.UserRole + 20
|
||||
VERSION_THUMBNAIL_ID_ROLE = QtCore.Qt.UserRole + 21
|
||||
ACTIVE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 22
|
||||
REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 23
|
||||
REPRESENTATIONS_COUNT_ROLE = QtCore.Qt.UserRole + 24
|
||||
SYNC_ACTIVE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 25
|
||||
SYNC_REMOTE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 26
|
||||
VERSION_STATUS_NAME_ROLE = QtCore.Qt.UserRole + 15
|
||||
VERSION_STATUS_SHORT_ROLE = QtCore.Qt.UserRole + 16
|
||||
VERSION_STATUS_COLOR_ROLE = QtCore.Qt.UserRole + 17
|
||||
VERSION_STATUS_ICON_ROLE = QtCore.Qt.UserRole + 18
|
||||
VERSION_AUTHOR_ROLE = QtCore.Qt.UserRole + 19
|
||||
VERSION_FRAME_RANGE_ROLE = QtCore.Qt.UserRole + 20
|
||||
VERSION_DURATION_ROLE = QtCore.Qt.UserRole + 21
|
||||
VERSION_HANDLES_ROLE = QtCore.Qt.UserRole + 22
|
||||
VERSION_STEP_ROLE = QtCore.Qt.UserRole + 23
|
||||
VERSION_AVAILABLE_ROLE = QtCore.Qt.UserRole + 24
|
||||
VERSION_THUMBNAIL_ID_ROLE = QtCore.Qt.UserRole + 25
|
||||
ACTIVE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 26
|
||||
REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 27
|
||||
REPRESENTATIONS_COUNT_ROLE = QtCore.Qt.UserRole + 28
|
||||
SYNC_ACTIVE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 29
|
||||
SYNC_REMOTE_SITE_AVAILABILITY = QtCore.Qt.UserRole + 30
|
||||
|
||||
|
||||
class ProductsModel(QtGui.QStandardItemModel):
|
||||
|
|
@ -44,6 +48,7 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
"Product type",
|
||||
"Folder",
|
||||
"Version",
|
||||
"Status",
|
||||
"Time",
|
||||
"Author",
|
||||
"Frames",
|
||||
|
|
@ -69,11 +74,35 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
]
|
||||
]
|
||||
|
||||
version_col = column_labels.index("Version")
|
||||
published_time_col = column_labels.index("Time")
|
||||
product_name_col = column_labels.index("Product name")
|
||||
product_type_col = column_labels.index("Product type")
|
||||
folders_label_col = column_labels.index("Folder")
|
||||
version_col = column_labels.index("Version")
|
||||
status_col = column_labels.index("Status")
|
||||
published_time_col = column_labels.index("Time")
|
||||
author_col = column_labels.index("Author")
|
||||
frame_range_col = column_labels.index("Frames")
|
||||
duration_col = column_labels.index("Duration")
|
||||
handles_col = column_labels.index("Handles")
|
||||
step_col = column_labels.index("Step")
|
||||
in_scene_col = column_labels.index("In scene")
|
||||
sitesync_avail_col = column_labels.index("Availability")
|
||||
_display_role_mapping = {
|
||||
product_name_col: QtCore.Qt.DisplayRole,
|
||||
product_type_col: PRODUCT_TYPE_ROLE,
|
||||
folders_label_col: FOLDER_LABEL_ROLE,
|
||||
version_col: VERSION_NAME_ROLE,
|
||||
status_col: VERSION_STATUS_NAME_ROLE,
|
||||
published_time_col: VERSION_PUBLISH_TIME_ROLE,
|
||||
author_col: VERSION_AUTHOR_ROLE,
|
||||
frame_range_col: VERSION_FRAME_RANGE_ROLE,
|
||||
duration_col: VERSION_DURATION_ROLE,
|
||||
handles_col: VERSION_HANDLES_ROLE,
|
||||
step_col: VERSION_STEP_ROLE,
|
||||
in_scene_col: PRODUCT_IN_SCENE_ROLE,
|
||||
sitesync_avail_col: VERSION_AVAILABLE_ROLE,
|
||||
|
||||
}
|
||||
|
||||
def __init__(self, controller):
|
||||
super(ProductsModel, self).__init__()
|
||||
|
|
@ -96,6 +125,7 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
|
||||
self._last_project_name = None
|
||||
self._last_folder_ids = []
|
||||
self._last_project_statuses = {}
|
||||
|
||||
def get_product_item_indexes(self):
|
||||
return [
|
||||
|
|
@ -141,6 +171,15 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
if not index.isValid():
|
||||
return None
|
||||
|
||||
if role in (VERSION_STATUS_SHORT_ROLE, VERSION_STATUS_COLOR_ROLE):
|
||||
status_name = self.data(index, VERSION_STATUS_NAME_ROLE)
|
||||
status_item = self._last_project_statuses.get(status_name)
|
||||
if status_item is None:
|
||||
return ""
|
||||
if role == VERSION_STATUS_SHORT_ROLE:
|
||||
return status_item.short
|
||||
return status_item.color
|
||||
|
||||
col = index.column()
|
||||
if col == 0:
|
||||
return super(ProductsModel, self).data(index, role)
|
||||
|
|
@ -160,7 +199,9 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
product_item = self._product_items_by_id.get(product_id)
|
||||
if product_item is None:
|
||||
return None
|
||||
return list(product_item.version_items.values())
|
||||
product_items = list(product_item.version_items.values())
|
||||
product_items.sort(reverse=True)
|
||||
return product_items
|
||||
|
||||
if role == QtCore.Qt.EditRole:
|
||||
return None
|
||||
|
|
@ -168,29 +209,8 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
if role == QtCore.Qt.DisplayRole:
|
||||
if not index.data(PRODUCT_ID_ROLE):
|
||||
return None
|
||||
if col == self.version_col:
|
||||
role = VERSION_NAME_ROLE
|
||||
elif col == 1:
|
||||
role = PRODUCT_TYPE_ROLE
|
||||
elif col == 2:
|
||||
role = FOLDER_LABEL_ROLE
|
||||
elif col == 4:
|
||||
role = VERSION_PUBLISH_TIME_ROLE
|
||||
elif col == 5:
|
||||
role = VERSION_AUTHOR_ROLE
|
||||
elif col == 6:
|
||||
role = VERSION_FRAME_RANGE_ROLE
|
||||
elif col == 7:
|
||||
role = VERSION_DURATION_ROLE
|
||||
elif col == 8:
|
||||
role = VERSION_HANDLES_ROLE
|
||||
elif col == 9:
|
||||
role = VERSION_STEP_ROLE
|
||||
elif col == 10:
|
||||
role = PRODUCT_IN_SCENE_ROLE
|
||||
elif col == 11:
|
||||
role = VERSION_AVAILABLE_ROLE
|
||||
else:
|
||||
role = self._display_role_mapping.get(col)
|
||||
if role is None:
|
||||
return None
|
||||
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
|
|
@ -312,6 +332,7 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
version_item.published_time, VERSION_PUBLISH_TIME_ROLE
|
||||
)
|
||||
model_item.setData(version_item.author, VERSION_AUTHOR_ROLE)
|
||||
model_item.setData(version_item.status, VERSION_STATUS_NAME_ROLE)
|
||||
model_item.setData(version_item.frame_range, VERSION_FRAME_RANGE_ROLE)
|
||||
model_item.setData(version_item.duration, VERSION_DURATION_ROLE)
|
||||
model_item.setData(version_item.handles, VERSION_HANDLES_ROLE)
|
||||
|
|
@ -393,6 +414,11 @@ class ProductsModel(QtGui.QStandardItemModel):
|
|||
|
||||
self._last_project_name = project_name
|
||||
self._last_folder_ids = folder_ids
|
||||
status_items = self._controller.get_project_status_items(project_name)
|
||||
self._last_project_statuses = {
|
||||
status_item.name: status_item
|
||||
for status_item in status_items
|
||||
}
|
||||
|
||||
active_site_icon_def = self._controller.get_active_site_icon_def(
|
||||
project_name
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from ayon_core.tools.utils import (
|
|||
RecursiveSortFilterProxyModel,
|
||||
DeselectableTreeView,
|
||||
)
|
||||
from ayon_core.tools.utils.delegates import PrettyTimeDelegate
|
||||
from ayon_core.tools.utils.delegates import PrettyTimeDelegate, StatusDelegate
|
||||
|
||||
from .products_model import (
|
||||
ProductsModel,
|
||||
|
|
@ -17,12 +17,16 @@ from .products_model import (
|
|||
FOLDER_ID_ROLE,
|
||||
PRODUCT_ID_ROLE,
|
||||
VERSION_ID_ROLE,
|
||||
VERSION_STATUS_NAME_ROLE,
|
||||
VERSION_STATUS_SHORT_ROLE,
|
||||
VERSION_STATUS_COLOR_ROLE,
|
||||
VERSION_STATUS_ICON_ROLE,
|
||||
VERSION_THUMBNAIL_ID_ROLE,
|
||||
)
|
||||
from .products_delegates import (
|
||||
VersionDelegate,
|
||||
LoadedInSceneDelegate,
|
||||
SiteSyncDelegate
|
||||
SiteSyncDelegate,
|
||||
)
|
||||
from .actions_utils import show_actions_menu
|
||||
|
||||
|
|
@ -89,6 +93,7 @@ class ProductsWidget(QtWidgets.QWidget):
|
|||
90, # Product type
|
||||
130, # Folder label
|
||||
60, # Version
|
||||
100, # Status
|
||||
125, # Time
|
||||
75, # Author
|
||||
75, # Frames
|
||||
|
|
@ -128,20 +133,24 @@ class ProductsWidget(QtWidgets.QWidget):
|
|||
products_view.setColumnWidth(idx, width)
|
||||
|
||||
version_delegate = VersionDelegate()
|
||||
products_view.setItemDelegateForColumn(
|
||||
products_model.version_col, version_delegate)
|
||||
|
||||
time_delegate = PrettyTimeDelegate()
|
||||
products_view.setItemDelegateForColumn(
|
||||
products_model.published_time_col, time_delegate)
|
||||
|
||||
status_delegate = StatusDelegate(
|
||||
VERSION_STATUS_NAME_ROLE,
|
||||
VERSION_STATUS_SHORT_ROLE,
|
||||
VERSION_STATUS_COLOR_ROLE,
|
||||
VERSION_STATUS_ICON_ROLE,
|
||||
)
|
||||
in_scene_delegate = LoadedInSceneDelegate()
|
||||
products_view.setItemDelegateForColumn(
|
||||
products_model.in_scene_col, in_scene_delegate)
|
||||
|
||||
sitesync_delegate = SiteSyncDelegate()
|
||||
products_view.setItemDelegateForColumn(
|
||||
products_model.sitesync_avail_col, sitesync_delegate)
|
||||
|
||||
for col, delegate in (
|
||||
(products_model.version_col, version_delegate),
|
||||
(products_model.published_time_col, time_delegate),
|
||||
(products_model.status_col, status_delegate),
|
||||
(products_model.in_scene_col, in_scene_delegate),
|
||||
(products_model.sitesync_avail_col, sitesync_delegate),
|
||||
):
|
||||
products_view.setItemDelegateForColumn(col, delegate)
|
||||
|
||||
main_layout = QtWidgets.QHBoxLayout(self)
|
||||
main_layout.setContentsMargins(0, 0, 0, 0)
|
||||
|
|
@ -175,6 +184,7 @@ class ProductsWidget(QtWidgets.QWidget):
|
|||
|
||||
self._version_delegate = version_delegate
|
||||
self._time_delegate = time_delegate
|
||||
self._status_delegate = status_delegate
|
||||
self._in_scene_delegate = in_scene_delegate
|
||||
self._sitesync_delegate = sitesync_delegate
|
||||
|
||||
|
|
|
|||
|
|
@ -1,14 +1,14 @@
|
|||
import ayon_api
|
||||
|
||||
from ayon_core.lib.events import QueuedEventSystem
|
||||
from ayon_core.host import ILoadHost
|
||||
from ayon_core.host import HostBase
|
||||
from ayon_core.pipeline import (
|
||||
registered_host,
|
||||
get_current_context,
|
||||
)
|
||||
from ayon_core.tools.common_models import HierarchyModel
|
||||
from ayon_core.tools.common_models import HierarchyModel, ProjectsModel
|
||||
|
||||
from .models import SiteSyncModel
|
||||
from .models import SiteSyncModel, ContainersModel
|
||||
|
||||
|
||||
class SceneInventoryController:
|
||||
|
|
@ -28,11 +28,16 @@ class SceneInventoryController:
|
|||
self._current_folder_id = None
|
||||
self._current_folder_set = False
|
||||
|
||||
self._containers_model = ContainersModel(self)
|
||||
self._sitesync_model = SiteSyncModel(self)
|
||||
# Switch dialog requirements
|
||||
self._hierarchy_model = HierarchyModel(self)
|
||||
self._projects_model = ProjectsModel(self)
|
||||
self._event_system = self._create_event_system()
|
||||
|
||||
def get_host(self) -> HostBase:
|
||||
return self._host
|
||||
|
||||
def emit_event(self, topic, data=None, source=None):
|
||||
if data is None:
|
||||
data = {}
|
||||
|
|
@ -47,6 +52,7 @@ class SceneInventoryController:
|
|||
self._current_folder_id = None
|
||||
self._current_folder_set = False
|
||||
|
||||
self._containers_model.reset()
|
||||
self._sitesync_model.reset()
|
||||
self._hierarchy_model.reset()
|
||||
|
||||
|
|
@ -80,13 +86,32 @@ class SceneInventoryController:
|
|||
self._current_folder_set = True
|
||||
return self._current_folder_id
|
||||
|
||||
def get_project_status_items(self):
|
||||
project_name = self.get_current_project_name()
|
||||
return self._projects_model.get_project_status_items(
|
||||
project_name, None
|
||||
)
|
||||
|
||||
# Containers methods
|
||||
def get_containers(self):
|
||||
host = self._host
|
||||
if isinstance(host, ILoadHost):
|
||||
return list(host.get_containers())
|
||||
elif hasattr(host, "ls"):
|
||||
return list(host.ls())
|
||||
return []
|
||||
return self._containers_model.get_containers()
|
||||
|
||||
def get_containers_by_item_ids(self, item_ids):
|
||||
return self._containers_model.get_containers_by_item_ids(item_ids)
|
||||
|
||||
def get_container_items(self):
|
||||
return self._containers_model.get_container_items()
|
||||
|
||||
def get_container_items_by_id(self, item_ids):
|
||||
return self._containers_model.get_container_items_by_id(item_ids)
|
||||
|
||||
def get_representation_info_items(self, representation_ids):
|
||||
return self._containers_model.get_representation_info_items(
|
||||
representation_ids
|
||||
)
|
||||
|
||||
def get_version_items(self, product_ids):
|
||||
return self._containers_model.get_version_items(product_ids)
|
||||
|
||||
# Site Sync methods
|
||||
def is_sitesync_enabled(self):
|
||||
|
|
|
|||
|
|
@ -1,38 +1,10 @@
|
|||
import numbers
|
||||
|
||||
import ayon_api
|
||||
|
||||
from ayon_core.pipeline import HeroVersionType
|
||||
from ayon_core.tools.utils.models import TreeModel
|
||||
from ayon_core.tools.utils.lib import format_version
|
||||
|
||||
from qtpy import QtWidgets, QtCore, QtGui
|
||||
|
||||
from .model import VERSION_LABEL_ROLE
|
||||
|
||||
|
||||
class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
||||
"""A delegate that display version integer formatted as version string."""
|
||||
|
||||
version_changed = QtCore.Signal()
|
||||
first_run = False
|
||||
lock = False
|
||||
|
||||
def __init__(self, controller, *args, **kwargs):
|
||||
self._controller = controller
|
||||
super(VersionDelegate, self).__init__(*args, **kwargs)
|
||||
|
||||
def get_project_name(self):
|
||||
return self._controller.get_current_project_name()
|
||||
|
||||
def displayText(self, value, locale):
|
||||
if isinstance(value, HeroVersionType):
|
||||
return format_version(value)
|
||||
if not isinstance(value, numbers.Integral):
|
||||
# For cases where no version is resolved like NOT FOUND cases
|
||||
# where a representation might not exist in current database
|
||||
return
|
||||
|
||||
return format_version(value)
|
||||
|
||||
def paint(self, painter, option, index):
|
||||
fg_color = index.data(QtCore.Qt.ForegroundRole)
|
||||
if fg_color:
|
||||
|
|
@ -44,7 +16,7 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
fg_color = None
|
||||
|
||||
if not fg_color:
|
||||
return super(VersionDelegate, self).paint(painter, option, index)
|
||||
return super().paint(painter, option, index)
|
||||
|
||||
if option.widget:
|
||||
style = option.widget.style()
|
||||
|
|
@ -60,9 +32,7 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
|
||||
painter.save()
|
||||
|
||||
text = self.displayText(
|
||||
index.data(QtCore.Qt.DisplayRole), option.locale
|
||||
)
|
||||
text = index.data(VERSION_LABEL_ROLE)
|
||||
pen = painter.pen()
|
||||
pen.setColor(fg_color)
|
||||
painter.setPen(pen)
|
||||
|
|
@ -82,77 +52,3 @@ class VersionDelegate(QtWidgets.QStyledItemDelegate):
|
|||
)
|
||||
|
||||
painter.restore()
|
||||
|
||||
def createEditor(self, parent, option, index):
|
||||
item = index.data(TreeModel.ItemRole)
|
||||
if item.get("isGroup") or item.get("isMerged"):
|
||||
return
|
||||
|
||||
editor = QtWidgets.QComboBox(parent)
|
||||
|
||||
def commit_data():
|
||||
if not self.first_run:
|
||||
self.commitData.emit(editor) # Update model data
|
||||
self.version_changed.emit() # Display model data
|
||||
editor.currentIndexChanged.connect(commit_data)
|
||||
|
||||
self.first_run = True
|
||||
self.lock = False
|
||||
|
||||
return editor
|
||||
|
||||
def setEditorData(self, editor, index):
|
||||
if self.lock:
|
||||
# Only set editor data once per delegation
|
||||
return
|
||||
|
||||
editor.clear()
|
||||
|
||||
# Current value of the index
|
||||
item = index.data(TreeModel.ItemRole)
|
||||
value = index.data(QtCore.Qt.DisplayRole)
|
||||
|
||||
project_name = self.get_project_name()
|
||||
# Add all available versions to the editor
|
||||
product_id = item["version_entity"]["productId"]
|
||||
version_entities = list(sorted(
|
||||
ayon_api.get_versions(
|
||||
project_name, product_ids={product_id}, active=True
|
||||
),
|
||||
key=lambda item: abs(item["version"])
|
||||
))
|
||||
|
||||
selected = None
|
||||
items = []
|
||||
is_hero_version = value < 0
|
||||
for version_entity in version_entities:
|
||||
version = version_entity["version"]
|
||||
label = format_version(version)
|
||||
item = QtGui.QStandardItem(label)
|
||||
item.setData(version_entity, QtCore.Qt.UserRole)
|
||||
items.append(item)
|
||||
|
||||
if (
|
||||
version == value
|
||||
or is_hero_version and version < 0
|
||||
):
|
||||
selected = item
|
||||
|
||||
# Reverse items so latest versions be upper
|
||||
items.reverse()
|
||||
for item in items:
|
||||
editor.model().appendRow(item)
|
||||
|
||||
index = 0
|
||||
if selected:
|
||||
index = selected.row()
|
||||
|
||||
# Will trigger index-change signal
|
||||
editor.setCurrentIndex(index)
|
||||
self.first_run = False
|
||||
self.lock = True
|
||||
|
||||
def setModelData(self, editor, model, index):
|
||||
"""Apply the integer version back in the model"""
|
||||
version = editor.itemData(editor.currentIndex())
|
||||
model.setData(index, version["name"])
|
||||
|
|
|
|||
|
|
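A note on usage: the delegate above only takes effect once it is assigned to the version column of a view. A minimal sketch, assuming an existing controller, an InventoryModel instance named "model" and a QTreeView named "view" (none of these names come from this diff):

# Illustrative wiring only; "controller", "model" and "view" are assumed to exist.
version_delegate = VersionDelegate(controller, view)
version_column = model.column_labels.index("Version")
view.setItemDelegateForColumn(version_column, version_delegate)
# Emitted after the editor commits a new value into the model.
version_delegate.version_changed.connect(lambda: print("version changed"))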
@@ -1,57 +1,113 @@
import re
import logging
import uuid

from collections import defaultdict
import collections

import ayon_api
from qtpy import QtCore, QtGui
import qtawesome

from ayon_core.pipeline import (
    get_current_project_name,
    HeroVersionType,
)
from ayon_core.style import get_default_entity_icon_color
from ayon_core.tools.utils import get_qt_icon
from ayon_core.tools.utils.models import TreeModel, Item
from ayon_core.tools.utils.lib import format_version

ITEM_ID_ROLE = QtCore.Qt.UserRole + 1
NAME_COLOR_ROLE = QtCore.Qt.UserRole + 2
COUNT_ROLE = QtCore.Qt.UserRole + 3
IS_CONTAINER_ITEM_ROLE = QtCore.Qt.UserRole + 4
VERSION_IS_LATEST_ROLE = QtCore.Qt.UserRole + 5
VERSION_IS_HERO_ROLE = QtCore.Qt.UserRole + 6
VERSION_LABEL_ROLE = QtCore.Qt.UserRole + 7
VERSION_COLOR_ROLE = QtCore.Qt.UserRole + 8
STATUS_NAME_ROLE = QtCore.Qt.UserRole + 9
STATUS_COLOR_ROLE = QtCore.Qt.UserRole + 10
STATUS_SHORT_ROLE = QtCore.Qt.UserRole + 11
STATUS_ICON_ROLE = QtCore.Qt.UserRole + 12
PRODUCT_ID_ROLE = QtCore.Qt.UserRole + 13
PRODUCT_TYPE_ROLE = QtCore.Qt.UserRole + 14
PRODUCT_TYPE_ICON_ROLE = QtCore.Qt.UserRole + 15
PRODUCT_GROUP_NAME_ROLE = QtCore.Qt.UserRole + 16
PRODUCT_GROUP_ICON_ROLE = QtCore.Qt.UserRole + 17
LOADER_NAME_ROLE = QtCore.Qt.UserRole + 18
OBJECT_NAME_ROLE = QtCore.Qt.UserRole + 19
ACTIVE_SITE_PROGRESS_ROLE = QtCore.Qt.UserRole + 20
REMOTE_SITE_PROGRESS_ROLE = QtCore.Qt.UserRole + 21
ACTIVE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 22
REMOTE_SITE_ICON_ROLE = QtCore.Qt.UserRole + 23
# This value hold unique value of container that should be used to identify
# containers inbetween refresh.
ITEM_UNIQUE_NAME_ROLE = QtCore.Qt.UserRole + 24


def walk_hierarchy(node):
    """Recursively yield group node."""
    for child in node.children():
        if child.get("isGroupNode"):
            yield child

        for _child in walk_hierarchy(child):
            yield _child


class InventoryModel(TreeModel):
class InventoryModel(QtGui.QStandardItemModel):
    """The model for the inventory"""

    Columns = [
    column_labels = [
        "Name",
        "version",
        "count",
        "productType",
        "group",
        "loader",
        "objectName",
        "active_site",
        "remote_site",
        "Version",
        "Status",
        "Count",
        "Product type",
        "Group",
        "Loader",
        "Object name",
        "Active site",
        "Remote site",
    ]
    active_site_col = Columns.index("active_site")
    remote_site_col = Columns.index("remote_site")
    name_col = column_labels.index("Name")
    version_col = column_labels.index("Version")
    status_col = column_labels.index("Status")
    count_col = column_labels.index("Count")
    product_type_col = column_labels.index("Product type")
    product_group_col = column_labels.index("Group")
    loader_col = column_labels.index("Loader")
    object_name_col = column_labels.index("Object name")
    active_site_col = column_labels.index("Active site")
    remote_site_col = column_labels.index("Remote site")
    display_role_by_column = {
        name_col: QtCore.Qt.DisplayRole,
        version_col: VERSION_LABEL_ROLE,
        status_col: STATUS_NAME_ROLE,
        count_col: COUNT_ROLE,
        product_type_col: PRODUCT_TYPE_ROLE,
        product_group_col: PRODUCT_GROUP_NAME_ROLE,
        loader_col: LOADER_NAME_ROLE,
        object_name_col: OBJECT_NAME_ROLE,
        active_site_col: ACTIVE_SITE_PROGRESS_ROLE,
        remote_site_col: REMOTE_SITE_PROGRESS_ROLE,
    }
    decoration_role_by_column = {
        name_col: QtCore.Qt.DecorationRole,
        product_type_col: PRODUCT_TYPE_ICON_ROLE,
        product_group_col: PRODUCT_GROUP_ICON_ROLE,
        active_site_col: ACTIVE_SITE_ICON_ROLE,
        remote_site_col: REMOTE_SITE_ICON_ROLE,
    }
    foreground_role_by_column = {
        name_col: NAME_COLOR_ROLE,
        version_col: VERSION_COLOR_ROLE,
        status_col: STATUS_COLOR_ROLE
    }
    width_by_column = {
        name_col: 250,
        version_col: 55,
        status_col: 100,
        count_col: 55,
        product_type_col: 150,
        product_group_col: 120,
        loader_col: 150,
    }

    OUTDATED_COLOR = QtGui.QColor(235, 30, 30)
    CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30)
    GRAYOUT_COLOR = QtGui.QColor(160, 160, 160)

    UniqueRole = QtCore.Qt.UserRole + 2  # unique label role

    def __init__(self, controller, parent=None):
        super(InventoryModel, self).__init__(parent)
        super().__init__(parent)

        self.setColumnCount(len(self.column_labels))
        for idx, label in enumerate(self.column_labels):
            self.setHeaderData(idx, QtCore.Qt.Horizontal, label)

        self.log = logging.getLogger(self.__class__.__name__)

        self._controller = controller
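The custom roles above follow the standard Qt pattern: values are stored on items under a role and read back through QModelIndex.data. A small illustrative sketch, relying only on the imports and constants defined in this file:

# Illustrative only, not part of the diff.
item = QtGui.QStandardItem("my_container")
item.setData("v003", VERSION_LABEL_ROLE)
item.setData(True, IS_CONTAINER_ITEM_ROLE)

model = QtGui.QStandardItemModel()
model.appendRow(item)

index = model.index(0, 0)
print(index.data(VERSION_LABEL_ROLE))      # "v003"
print(index.data(IS_CONTAINER_ITEM_ROLE))  # True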
@ -60,103 +116,217 @@ class InventoryModel(TreeModel):
|
|||
|
||||
self._default_icon_color = get_default_entity_icon_color()
|
||||
|
||||
site_icons = self._controller.get_site_provider_icons()
|
||||
|
||||
self._site_icons = {
|
||||
provider: get_qt_icon(icon_def)
|
||||
for provider, icon_def in site_icons.items()
|
||||
}
|
||||
|
||||
def outdated(self, item):
|
||||
return item.get("isOutdated", True)
|
||||
|
||||
def refresh(self, selected=None):
|
||||
"""Refresh the model"""
|
||||
# for debugging or testing, injecting items from outside
|
||||
container_items = self._controller.get_container_items()
|
||||
|
||||
self._clear_items()
|
||||
|
||||
items_by_repre_id = {}
|
||||
for container_item in container_items:
|
||||
# if (
|
||||
# selected is not None
|
||||
# and container_item.item_id not in selected
|
||||
# ):
|
||||
# continue
|
||||
repre_id = container_item.representation_id
|
||||
items = items_by_repre_id.setdefault(repre_id, [])
|
||||
items.append(container_item)
|
||||
|
||||
repre_id = set(items_by_repre_id.keys())
|
||||
repre_info_by_id = self._controller.get_representation_info_items(
|
||||
repre_id
|
||||
)
|
||||
product_ids = {
|
||||
repre_info.product_id
|
||||
for repre_info in repre_info_by_id.values()
|
||||
}
|
||||
version_items_by_product_id = self._controller.get_version_items(
|
||||
product_ids
|
||||
)
|
||||
# SiteSync addon information
|
||||
progress_by_id = self._controller.get_representations_site_progress(
|
||||
repre_id
|
||||
)
|
||||
sites_info = self._controller.get_sites_information()
|
||||
site_icons = {
|
||||
provider: get_qt_icon(icon_def)
|
||||
for provider, icon_def in (
|
||||
self._controller.get_site_provider_icons().items()
|
||||
)
|
||||
}
|
||||
status_items_by_name = {
|
||||
status_item.name: status_item
|
||||
for status_item in self._controller.get_project_status_items()
|
||||
}
|
||||
|
||||
group_item_icon = qtawesome.icon(
|
||||
"fa.folder", color=self._default_icon_color
|
||||
)
|
||||
valid_item_icon = qtawesome.icon(
|
||||
"fa.file-o", color=self._default_icon_color
|
||||
)
|
||||
invalid_item_icon = qtawesome.icon(
|
||||
"fa.exclamation-circle", color=self._default_icon_color
|
||||
)
|
||||
group_icon = qtawesome.icon(
|
||||
"fa.object-group", color=self._default_icon_color
|
||||
)
|
||||
product_type_icon = qtawesome.icon(
|
||||
"fa.folder", color="#0091B2"
|
||||
)
|
||||
group_item_font = QtGui.QFont()
|
||||
group_item_font.setBold(True)
|
||||
|
||||
active_site_icon = site_icons.get(sites_info["active_site_provider"])
|
||||
remote_site_icon = site_icons.get(sites_info["remote_site_provider"])
|
||||
|
||||
root_item = self.invisibleRootItem()
|
||||
|
||||
group_items = []
|
||||
for repre_id, container_items in items_by_repre_id.items():
|
||||
repre_info = repre_info_by_id[repre_id]
|
||||
version_label = "N/A"
|
||||
version_color = None
|
||||
is_latest = False
|
||||
is_hero = False
|
||||
status_name = None
|
||||
status_color = None
|
||||
status_short = None
|
||||
if not repre_info.is_valid:
|
||||
group_name = "< Entity N/A >"
|
||||
item_icon = invalid_item_icon
|
||||
|
||||
else:
|
||||
group_name = "{}_{}: ({})".format(
|
||||
repre_info.folder_path.rsplit("/")[-1],
|
||||
repre_info.product_name,
|
||||
repre_info.representation_name
|
||||
)
|
||||
item_icon = valid_item_icon
|
||||
|
||||
version_items = (
|
||||
version_items_by_product_id[repre_info.product_id]
|
||||
)
|
||||
version_item = version_items[repre_info.version_id]
|
||||
version_label = format_version(version_item.version)
|
||||
is_hero = version_item.version < 0
|
||||
is_latest = version_item.is_latest
|
||||
if not is_latest:
|
||||
version_color = self.OUTDATED_COLOR
|
||||
status_name = version_item.status
|
||||
status_item = status_items_by_name.get(status_name)
|
||||
if status_item:
|
||||
status_short = status_item.short
|
||||
status_color = status_item.color
|
||||
|
||||
container_model_items = []
|
||||
for container_item in container_items:
|
||||
unique_name = (
|
||||
repre_info.representation_name
|
||||
+ container_item.object_name or "<none>"
|
||||
)
|
||||
|
||||
item = QtGui.QStandardItem()
|
||||
item.setColumnCount(root_item.columnCount())
|
||||
item.setData(container_item.namespace, QtCore.Qt.DisplayRole)
|
||||
item.setData(self.GRAYOUT_COLOR, NAME_COLOR_ROLE)
|
||||
item.setData(self.GRAYOUT_COLOR, VERSION_COLOR_ROLE)
|
||||
item.setData(item_icon, QtCore.Qt.DecorationRole)
|
||||
item.setData(repre_info.product_id, PRODUCT_ID_ROLE)
|
||||
item.setData(container_item.item_id, ITEM_ID_ROLE)
|
||||
item.setData(version_label, VERSION_LABEL_ROLE)
|
||||
item.setData(container_item.loader_name, LOADER_NAME_ROLE)
|
||||
item.setData(container_item.object_name, OBJECT_NAME_ROLE)
|
||||
item.setData(True, IS_CONTAINER_ITEM_ROLE)
|
||||
item.setData(unique_name, ITEM_UNIQUE_NAME_ROLE)
|
||||
container_model_items.append(item)
|
||||
|
||||
if not container_model_items:
|
||||
continue
|
||||
|
||||
progress = progress_by_id[repre_id]
|
||||
active_site_progress = "{}%".format(
|
||||
max(progress["active_site"], 0) * 100
|
||||
)
|
||||
remote_site_progress = "{}%".format(
|
||||
max(progress["remote_site"], 0) * 100
|
||||
)
|
||||
|
||||
group_item = QtGui.QStandardItem()
|
||||
group_item.setColumnCount(root_item.columnCount())
|
||||
group_item.setData(group_name, QtCore.Qt.DisplayRole)
|
||||
group_item.setData(group_name, ITEM_UNIQUE_NAME_ROLE)
|
||||
group_item.setData(group_item_icon, QtCore.Qt.DecorationRole)
|
||||
group_item.setData(group_item_font, QtCore.Qt.FontRole)
|
||||
group_item.setData(repre_info.product_id, PRODUCT_ID_ROLE)
|
||||
group_item.setData(repre_info.product_type, PRODUCT_TYPE_ROLE)
|
||||
group_item.setData(product_type_icon, PRODUCT_TYPE_ICON_ROLE)
|
||||
group_item.setData(is_latest, VERSION_IS_LATEST_ROLE)
|
||||
group_item.setData(is_hero, VERSION_IS_HERO_ROLE)
|
||||
group_item.setData(version_label, VERSION_LABEL_ROLE)
|
||||
group_item.setData(len(container_items), COUNT_ROLE)
|
||||
group_item.setData(status_name, STATUS_NAME_ROLE)
|
||||
group_item.setData(status_short, STATUS_SHORT_ROLE)
|
||||
group_item.setData(status_color, STATUS_COLOR_ROLE)
|
||||
|
||||
group_item.setData(
|
||||
active_site_progress, ACTIVE_SITE_PROGRESS_ROLE
|
||||
)
|
||||
group_item.setData(
|
||||
remote_site_progress, REMOTE_SITE_PROGRESS_ROLE
|
||||
)
|
||||
group_item.setData(active_site_icon, ACTIVE_SITE_ICON_ROLE)
|
||||
group_item.setData(remote_site_icon, REMOTE_SITE_ICON_ROLE)
|
||||
group_item.setData(False, IS_CONTAINER_ITEM_ROLE)
|
||||
|
||||
if version_color is not None:
|
||||
group_item.setData(version_color, VERSION_COLOR_ROLE)
|
||||
|
||||
if repre_info.product_group:
|
||||
group_item.setData(
|
||||
repre_info.product_group, PRODUCT_GROUP_NAME_ROLE
|
||||
)
|
||||
group_item.setData(group_icon, PRODUCT_GROUP_ICON_ROLE)
|
||||
|
||||
group_item.appendRows(container_model_items)
|
||||
group_items.append(group_item)
|
||||
|
||||
if group_items:
|
||||
root_item.appendRows(group_items)
|
||||
|
||||
def flags(self, index):
|
||||
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
|
||||
|
||||
def data(self, index, role):
|
||||
if not index.isValid():
|
||||
return
|
||||
|
||||
item = index.internalPointer()
|
||||
col = index.column()
|
||||
if role == QtCore.Qt.DisplayRole:
|
||||
role = self.display_role_by_column.get(col)
|
||||
if role is None:
|
||||
print(col, role)
|
||||
return None
|
||||
|
||||
if role == QtCore.Qt.FontRole:
|
||||
# Make top-level entries bold
|
||||
if item.get("isGroupNode") or item.get("isNotSet"): # group-item
|
||||
font = QtGui.QFont()
|
||||
font.setBold(True)
|
||||
return font
|
||||
elif role == QtCore.Qt.DecorationRole:
|
||||
role = self.decoration_role_by_column.get(col)
|
||||
if role is None:
|
||||
return None
|
||||
|
||||
if role == QtCore.Qt.ForegroundRole:
|
||||
# Set the text color to the OUTDATED_COLOR when the
|
||||
# collected version is not the same as the highest version
|
||||
key = self.Columns[index.column()]
|
||||
if key == "version": # version
|
||||
if item.get("isGroupNode"): # group-item
|
||||
if self.outdated(item):
|
||||
return self.OUTDATED_COLOR
|
||||
elif role == QtCore.Qt.ForegroundRole:
|
||||
role = self.foreground_role_by_column.get(col)
|
||||
if role is None:
|
||||
return None
|
||||
|
||||
if self._hierarchy_view:
|
||||
# If current group is not outdated, check if any
|
||||
# outdated children.
|
||||
for _node in walk_hierarchy(item):
|
||||
if self.outdated(_node):
|
||||
return self.CHILD_OUTDATED_COLOR
|
||||
else:
|
||||
if col != 0:
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
|
||||
if self._hierarchy_view:
|
||||
# Although this is not a group item, we still need
|
||||
# to distinguish which one contain outdated child.
|
||||
for _node in walk_hierarchy(item):
|
||||
if self.outdated(_node):
|
||||
return self.CHILD_OUTDATED_COLOR.darker(150)
|
||||
|
||||
return self.GRAYOUT_COLOR
|
||||
|
||||
if key == "Name" and not item.get("isGroupNode"):
|
||||
return self.GRAYOUT_COLOR
|
||||
|
||||
# Add icons
|
||||
if role == QtCore.Qt.DecorationRole:
|
||||
if index.column() == 0:
|
||||
# Override color
|
||||
color = item.get("color", self._default_icon_color)
|
||||
if item.get("isGroupNode"): # group-item
|
||||
return qtawesome.icon("fa.folder", color=color)
|
||||
if item.get("isNotSet"):
|
||||
return qtawesome.icon("fa.exclamation-circle", color=color)
|
||||
|
||||
return qtawesome.icon("fa.file-o", color=color)
|
||||
|
||||
if index.column() == 3:
|
||||
# Product type icon
|
||||
return item.get("productTypeIcon", None)
|
||||
|
||||
column_name = self.Columns[index.column()]
|
||||
|
||||
if column_name == "group" and item.get("group"):
|
||||
return qtawesome.icon("fa.object-group",
|
||||
color=get_default_entity_icon_color())
|
||||
|
||||
if item.get("isGroupNode"):
|
||||
if column_name == "active_site":
|
||||
provider = item.get("active_site_provider")
|
||||
return self._site_icons.get(provider)
|
||||
|
||||
if column_name == "remote_site":
|
||||
provider = item.get("remote_site_provider")
|
||||
return self._site_icons.get(provider)
|
||||
|
||||
if role == QtCore.Qt.DisplayRole and item.get("isGroupNode"):
|
||||
column_name = self.Columns[index.column()]
|
||||
progress = None
|
||||
if column_name == "active_site":
|
||||
progress = item.get("active_site_progress", 0)
|
||||
elif column_name == "remote_site":
|
||||
progress = item.get("remote_site_progress", 0)
|
||||
if progress is not None:
|
||||
return "{}%".format(max(progress, 0) * 100)
|
||||
|
||||
if role == self.UniqueRole:
|
||||
return item["representation"] + item.get("objectName", "<none>")
|
||||
|
||||
return super(InventoryModel, self).data(index, role)
|
||||
return super().data(index, role)
|
||||
|
||||
def set_hierarchy_view(self, state):
|
||||
"""Set whether to display products in hierarchy view."""
|
||||
|
|
@ -165,299 +335,34 @@ class InventoryModel(TreeModel):
|
|||
if state != self._hierarchy_view:
|
||||
self._hierarchy_view = state
|
||||
|
||||
def refresh(self, selected=None, containers=None):
|
||||
"""Refresh the model"""
|
||||
|
||||
# for debugging or testing, injecting items from outside
|
||||
if containers is None:
|
||||
containers = self._controller.get_containers()
|
||||
|
||||
self.clear()
|
||||
if not selected or not self._hierarchy_view:
|
||||
self._add_containers(containers)
|
||||
return
|
||||
|
||||
# Filter by cherry-picked items
|
||||
self._add_containers((
|
||||
container
|
||||
for container in containers
|
||||
if container["objectName"] in selected
|
||||
))
|
||||
|
||||
def _add_containers(self, containers, parent=None):
|
||||
"""Add the items to the model.
|
||||
|
||||
The items should be formatted similar to `api.ls()` returns, an item
|
||||
is then represented as:
|
||||
{"filename_v001.ma": [full/filename/of/loaded/filename_v001.ma,
|
||||
full/filename/of/loaded/filename_v001.ma],
|
||||
"nodetype" : "reference",
|
||||
"node": "referenceNode1"}
|
||||
|
||||
Note: When performing an additional call to `add_items` it will *not*
|
||||
group the new items with previously existing item groups of the
|
||||
same type.
|
||||
|
||||
Args:
|
||||
containers (generator): Container items.
|
||||
parent (Item, optional): Set this item as parent for the added
|
||||
items when provided. Defaults to the root of the model.
|
||||
|
||||
Returns:
|
||||
node.Item: root node which has children added based on the data
|
||||
"""
|
||||
|
||||
project_name = get_current_project_name()
|
||||
|
||||
self.beginResetModel()
|
||||
|
||||
# Group by representation
|
||||
grouped = defaultdict(lambda: {"containers": list()})
|
||||
for container in containers:
|
||||
repre_id = container["representation"]
|
||||
grouped[repre_id]["containers"].append(container)
|
||||
|
||||
(
|
||||
repres_by_id,
|
||||
versions_by_id,
|
||||
products_by_id,
|
||||
folders_by_id,
|
||||
) = self._query_entities(project_name, set(grouped.keys()))
|
||||
# Add to model
|
||||
not_found = defaultdict(list)
|
||||
not_found_ids = []
|
||||
for repre_id, group_dict in sorted(grouped.items()):
|
||||
group_containers = group_dict["containers"]
|
||||
representation = repres_by_id.get(repre_id)
|
||||
if not representation:
|
||||
not_found["representation"].extend(group_containers)
|
||||
not_found_ids.append(repre_id)
|
||||
def get_outdated_item_ids(self, ignore_hero=True):
|
||||
outdated_item_ids = []
|
||||
root_item = self.invisibleRootItem()
|
||||
for row in range(root_item.rowCount()):
|
||||
group_item = root_item.child(row)
|
||||
if group_item.data(VERSION_IS_LATEST_ROLE):
|
||||
continue
|
||||
|
||||
version_entity = versions_by_id.get(representation["versionId"])
|
||||
if not version_entity:
|
||||
not_found["version"].extend(group_containers)
|
||||
not_found_ids.append(repre_id)
|
||||
if ignore_hero and group_item.data(VERSION_IS_HERO_ROLE):
|
||||
continue
|
||||
|
||||
product_entity = products_by_id.get(version_entity["productId"])
|
||||
if not product_entity:
|
||||
not_found["product"].extend(group_containers)
|
||||
not_found_ids.append(repre_id)
|
||||
continue
|
||||
for idx in range(group_item.rowCount()):
|
||||
item = group_item.child(idx)
|
||||
outdated_item_ids.append(item.data(ITEM_ID_ROLE))
|
||||
return outdated_item_ids
|
||||
|
||||
folder_entity = folders_by_id.get(product_entity["folderId"])
|
||||
if not folder_entity:
|
||||
not_found["folder"].extend(group_containers)
|
||||
not_found_ids.append(repre_id)
|
||||
continue
|
||||
|
||||
group_dict.update({
|
||||
"representation": representation,
|
||||
"version": version_entity,
|
||||
"product": product_entity,
|
||||
"folder": folder_entity
|
||||
})
|
||||
|
||||
for _repre_id in not_found_ids:
|
||||
grouped.pop(_repre_id)
|
||||
|
||||
for where, group_containers in not_found.items():
|
||||
# create the group header
|
||||
group_node = Item()
|
||||
name = "< NOT FOUND - {} >".format(where)
|
||||
group_node["Name"] = name
|
||||
group_node["representation"] = name
|
||||
group_node["count"] = len(group_containers)
|
||||
group_node["isGroupNode"] = False
|
||||
group_node["isNotSet"] = True
|
||||
|
||||
self.add_child(group_node, parent=parent)
|
||||
|
||||
for container in group_containers:
|
||||
item_node = Item()
|
||||
item_node.update(container)
|
||||
item_node["Name"] = container.get("objectName", "NO NAME")
|
||||
item_node["isNotFound"] = True
|
||||
self.add_child(item_node, parent=group_node)
|
||||
|
||||
# TODO Use product icons
|
||||
product_type_icon = qtawesome.icon(
|
||||
"fa.folder", color="#0091B2"
|
||||
)
|
||||
# Prepare site sync specific data
|
||||
progress_by_id = self._controller.get_representations_site_progress(
|
||||
set(grouped.keys())
|
||||
)
|
||||
sites_info = self._controller.get_sites_information()
|
||||
|
||||
# Query the highest available version so the model can know
|
||||
# whether current version is currently up-to-date.
|
||||
highest_version_by_product_id = ayon_api.get_last_versions(
|
||||
project_name,
|
||||
product_ids={
|
||||
group["version"]["productId"] for group in grouped.values()
|
||||
},
|
||||
fields={"productId", "version"}
|
||||
)
|
||||
# Map value to `version` key
|
||||
highest_version_by_product_id = {
|
||||
product_id: version["version"]
|
||||
for product_id, version in highest_version_by_product_id.items()
|
||||
}
|
||||
|
||||
for repre_id, group_dict in sorted(grouped.items()):
|
||||
group_containers = group_dict["containers"]
|
||||
repre_entity = group_dict["representation"]
|
||||
version_entity = group_dict["version"]
|
||||
folder_entity = group_dict["folder"]
|
||||
product_entity = group_dict["product"]
|
||||
|
||||
product_type = product_entity["productType"]
|
||||
|
||||
# create the group header
|
||||
group_node = Item()
|
||||
group_node["Name"] = "{}_{}: ({})".format(
|
||||
folder_entity["name"],
|
||||
product_entity["name"],
|
||||
repre_entity["name"]
|
||||
)
|
||||
group_node["representation"] = repre_id
|
||||
|
||||
# Detect hero version type
|
||||
version = version_entity["version"]
|
||||
if version < 0:
|
||||
version = HeroVersionType(version)
|
||||
group_node["version"] = version
|
||||
|
||||
# Check if the version is outdated.
|
||||
# Hero versions are never considered to be outdated.
|
||||
is_outdated = False
|
||||
if not isinstance(version, HeroVersionType):
|
||||
last_version = highest_version_by_product_id.get(
|
||||
version_entity["productId"])
|
||||
if last_version is not None:
|
||||
is_outdated = version_entity["version"] != last_version
|
||||
group_node["isOutdated"] = is_outdated
|
||||
|
||||
group_node["productType"] = product_type or ""
|
||||
group_node["productTypeIcon"] = product_type_icon
|
||||
group_node["count"] = len(group_containers)
|
||||
group_node["isGroupNode"] = True
|
||||
group_node["group"] = product_entity["attrib"].get("productGroup")
|
||||
|
||||
# Site sync specific data
|
||||
progress = progress_by_id[repre_id]
|
||||
group_node.update(sites_info)
|
||||
group_node["active_site_progress"] = progress["active_site"]
|
||||
group_node["remote_site_progress"] = progress["remote_site"]
|
||||
|
||||
self.add_child(group_node, parent=parent)
|
||||
|
||||
for container in group_containers:
|
||||
item_node = Item()
|
||||
item_node.update(container)
|
||||
|
||||
# store the current version on the item
|
||||
item_node["version"] = version_entity["version"]
|
||||
item_node["version_entity"] = version_entity
|
||||
|
||||
# Remapping namespace to item name.
|
||||
# Noted that the name key is capital "N", by doing this, we
|
||||
# can view namespace in GUI without changing container data.
|
||||
item_node["Name"] = container["namespace"]
|
||||
|
||||
self.add_child(item_node, parent=group_node)
|
||||
|
||||
self.endResetModel()
|
||||
|
||||
return self._root_item
|
||||
|
||||
def _query_entities(self, project_name, repre_ids):
|
||||
"""Query entities for representations from containers.
|
||||
|
||||
Returns:
|
||||
tuple[dict, dict, dict, dict]: Representation, version, product
|
||||
and folder documents by id.
|
||||
"""
|
||||
|
||||
repres_by_id = {}
|
||||
versions_by_id = {}
|
||||
products_by_id = {}
|
||||
folders_by_id = {}
|
||||
output = (
|
||||
repres_by_id,
|
||||
versions_by_id,
|
||||
products_by_id,
|
||||
folders_by_id,
|
||||
)
|
||||
|
||||
filtered_repre_ids = set()
|
||||
for repre_id in repre_ids:
|
||||
# Filter out invalid representation ids
|
||||
# NOTE: This is added because scenes from OpenPype did contain
|
||||
# ObjectId from mongo.
|
||||
try:
|
||||
uuid.UUID(repre_id)
|
||||
filtered_repre_ids.add(repre_id)
|
||||
except ValueError:
|
||||
continue
|
||||
if not filtered_repre_ids:
|
||||
return output
|
||||
|
||||
repre_entities = ayon_api.get_representations(project_name, repre_ids)
|
||||
repres_by_id.update({
|
||||
repre_entity["id"]: repre_entity
|
||||
for repre_entity in repre_entities
|
||||
})
|
||||
version_ids = {
|
||||
repre_entity["versionId"]
|
||||
for repre_entity in repres_by_id.values()
|
||||
}
|
||||
if not version_ids:
|
||||
return output
|
||||
|
||||
versions_by_id.update({
|
||||
version_entity["id"]: version_entity
|
||||
for version_entity in ayon_api.get_versions(
|
||||
project_name, version_ids=version_ids
|
||||
)
|
||||
})
|
||||
|
||||
product_ids = {
|
||||
version_entity["productId"]
|
||||
for version_entity in versions_by_id.values()
|
||||
}
|
||||
if not product_ids:
|
||||
return output
|
||||
|
||||
products_by_id.update({
|
||||
product_entity["id"]: product_entity
|
||||
for product_entity in ayon_api.get_products(
|
||||
project_name, product_ids=product_ids
|
||||
)
|
||||
})
|
||||
folder_ids = {
|
||||
product_entity["folderId"]
|
||||
for product_entity in products_by_id.values()
|
||||
}
|
||||
if not folder_ids:
|
||||
return output
|
||||
|
||||
folders_by_id.update({
|
||||
folder_entity["id"]: folder_entity
|
||||
for folder_entity in ayon_api.get_folders(
|
||||
project_name, folder_ids=folder_ids
|
||||
)
|
||||
})
|
||||
return output
|
||||
def _clear_items(self):
|
||||
root_item = self.invisibleRootItem()
|
||||
root_item.removeRows(0, root_item.rowCount())
|
||||
|
||||
|
||||
class FilterProxyModel(QtCore.QSortFilterProxyModel):
|
||||
"""Filter model to where key column's value is in the filtered tags"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FilterProxyModel, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.setDynamicSortFilter(True)
|
||||
self.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
|
||||
self._filter_outdated = False
|
||||
self._hierarchy_view = False
|
||||
|
||||
|
|
@ -467,28 +372,23 @@ class FilterProxyModel(QtCore.QSortFilterProxyModel):
|
|||
|
||||
# Always allow bottom entries (individual containers), since their
|
||||
# parent group hidden if it wouldn't have been validated.
|
||||
rows = model.rowCount(source_index)
|
||||
if not rows:
|
||||
if source_index.data(IS_CONTAINER_ITEM_ROLE):
|
||||
return True
|
||||
|
||||
# Filter by regex
|
||||
if hasattr(self, "filterRegExp"):
|
||||
regex = self.filterRegExp()
|
||||
else:
|
||||
regex = self.filterRegularExpression()
|
||||
pattern = regex.pattern()
|
||||
if pattern:
|
||||
pattern = re.escape(pattern)
|
||||
|
||||
if not self._matches(row, parent, pattern):
|
||||
return False
|
||||
|
||||
if self._filter_outdated:
|
||||
# When filtering to outdated we filter the up to date entries
|
||||
# thus we "allow" them when they are outdated
|
||||
if not self._is_outdated(row, parent):
|
||||
if source_index.data(VERSION_IS_LATEST_ROLE):
|
||||
return False
|
||||
|
||||
# Filter by regex
|
||||
if hasattr(self, "filterRegularExpression"):
|
||||
regex = self.filterRegularExpression()
|
||||
else:
|
||||
regex = self.filterRegExp()
|
||||
|
||||
if not self._matches(row, parent, regex.pattern()):
|
||||
return False
|
||||
return True
|
||||
|
||||
def set_filter_outdated(self, state):
|
||||
|
|
@ -505,37 +405,6 @@ class FilterProxyModel(QtCore.QSortFilterProxyModel):
|
|||
if state != self._hierarchy_view:
|
||||
self._hierarchy_view = state
|
||||
|
||||
def _is_outdated(self, row, parent):
|
||||
"""Return whether row is outdated.
|
||||
|
||||
A row is considered outdated if `isOutdated` data is true or not set.
|
||||
|
||||
"""
|
||||
def outdated(node):
|
||||
return node.get("isOutdated", True)
|
||||
|
||||
index = self.sourceModel().index(row, self.filterKeyColumn(), parent)
|
||||
|
||||
# The scene contents are grouped by "representation", e.g. the same
|
||||
# "representation" loaded twice is grouped under the same header.
|
||||
# Since the version check filters these parent groups we skip that
|
||||
# check for the individual children.
|
||||
has_parent = index.parent().isValid()
|
||||
if has_parent and not self._hierarchy_view:
|
||||
return True
|
||||
|
||||
# Filter to those that have the different version numbers
|
||||
node = index.internalPointer()
|
||||
if outdated(node):
|
||||
return True
|
||||
|
||||
if self._hierarchy_view:
|
||||
for _node in walk_hierarchy(node):
|
||||
if outdated(_node):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _matches(self, row, parent, pattern):
|
||||
"""Return whether row matches regex pattern.
|
||||
|
||||
|
|
@ -548,38 +417,31 @@ class FilterProxyModel(QtCore.QSortFilterProxyModel):
|
|||
bool
|
||||
|
||||
"""
|
||||
if not pattern:
|
||||
return True
|
||||
|
||||
flags = 0
|
||||
if self.sortCaseSensitivity() == QtCore.Qt.CaseInsensitive:
|
||||
flags = re.IGNORECASE
|
||||
|
||||
regex = re.compile(re.escape(pattern), flags=flags)
|
||||
|
||||
model = self.sourceModel()
|
||||
column = self.filterKeyColumn()
|
||||
role = self.filterRole()
|
||||
|
||||
def matches(row, parent, pattern):
|
||||
matches_queue = collections.deque()
|
||||
matches_queue.append((row, parent))
|
||||
while matches_queue:
|
||||
queue_item = matches_queue.popleft()
|
||||
row, parent = queue_item
|
||||
|
||||
index = model.index(row, column, parent)
|
||||
key = model.data(index, role)
|
||||
if re.search(pattern, key, re.IGNORECASE):
|
||||
value = model.data(index, role)
|
||||
if regex.search(value):
|
||||
return True
|
||||
|
||||
if matches(row, parent, pattern):
|
||||
return True
|
||||
for idx in range(model.rowCount(index)):
|
||||
matches_queue.append((idx, index))
|
||||
|
||||
# Also allow if any of the children matches
|
||||
source_index = model.index(row, column, parent)
|
||||
rows = model.rowCount(source_index)
|
||||
|
||||
if any(
|
||||
matches(idx, source_index, pattern)
|
||||
for idx in range(rows)
|
||||
):
|
||||
return True
|
||||
|
||||
if not self._hierarchy_view:
|
||||
return False
|
||||
|
||||
for idx in range(rows):
|
||||
child_index = model.index(idx, column, source_index)
|
||||
child_rows = model.rowCount(child_index)
|
||||
return any(
|
||||
self._matches(child_idx, child_index, pattern)
|
||||
for child_idx in range(child_rows)
|
||||
)
|
||||
|
||||
return True
|
||||
return False
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,8 @@
from .containers import ContainersModel
from .sitesync import SiteSyncModel


__all__ = (
    "ContainersModel",
    "SiteSyncModel",
)
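With the exports above, both models can be imported directly from the package:

# Example import; the package path matches the new file locations below.
from ayon_core.tools.sceneinventory.models import (
    ContainersModel,
    SiteSyncModel,
)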
client/ayon_core/tools/sceneinventory/models/containers.py (new file, 343 lines)

@@ -0,0 +1,343 @@
import uuid
import collections

import ayon_api
from ayon_api.graphql import GraphQlQuery
from ayon_core.host import ILoadHost


# --- Implementation that should be in ayon-python-api ---
# The implementation is not available in all versions of ayon-python-api.
RepresentationHierarchy = collections.namedtuple(
    "RepresentationHierarchy",
    ("folder", "product", "version", "representation")
)


def representations_parent_ids_qraphql_query():
    query = GraphQlQuery("RepresentationsHierarchyQuery")

    project_name_var = query.add_variable("projectName", "String!")
    repre_ids_var = query.add_variable("representationIds", "[String!]")

    project_field = query.add_field("project")
    project_field.set_filter("name", project_name_var)

    repres_field = project_field.add_field_with_edges("representations")
    repres_field.add_field("id")
    repres_field.add_field("name")
    repres_field.set_filter("ids", repre_ids_var)
    version_field = repres_field.add_field("version")
    version_field.add_field("id")
    product_field = version_field.add_field("product")
    product_field.add_field("id")
    product_field.add_field("name")
    product_field.add_field("productType")
    product_attrib_field = product_field.add_field("attrib")
    product_attrib_field.add_field("productGroup")
    folder_field = product_field.add_field("folder")
    folder_field.add_field("id")
    folder_field.add_field("path")
    return query


def get_representations_hierarchy(project_name, representation_ids):
    """Find representations parents by representation id.

    Representation parent entities up to project.

    Args:
        project_name (str): Project where to look for entities.
        representation_ids (Iterable[str]): Representation ids.

    Returns:
        dict[str, RepresentationParents]: Parent entities by
            representation id.

    """
    if not representation_ids:
        return {}

    repre_ids = set(representation_ids)
    output = {
        repre_id: RepresentationHierarchy(None, None, None, None)
        for repre_id in representation_ids
    }

    query = representations_parent_ids_qraphql_query()
    query.set_variable_value("projectName", project_name)
    query.set_variable_value("representationIds", list(repre_ids))

    con = ayon_api.get_server_api_connection()
    parsed_data = query.query(con)
    for repre in parsed_data["project"]["representations"]:
        repre_id = repre["id"]
        version = repre.pop("version")
        product = version.pop("product")
        folder = product.pop("folder")

        output[repre_id] = RepresentationHierarchy(
            folder, product, version, repre
        )

    return output
# --- END of ayon-python-api implementation ---
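A hedged usage sketch for the helper above; the project name and representation id are placeholder values and a configured ayon_api server connection is assumed:

# Placeholder values, illustrative only.
hierarchy_by_repre_id = get_representations_hierarchy(
    "my_project", {"11112222333344445555666677778888"}
)
for repre_id, hierarchy in hierarchy_by_repre_id.items():
    if hierarchy.representation is None:
        # Representation was not found on the server.
        continue
    print(
        hierarchy.folder["path"],
        hierarchy.product["name"],
        hierarchy.representation["name"],
    )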
class ContainerItem:
    def __init__(
        self,
        representation_id,
        loader_name,
        namespace,
        name,
        object_name,
        item_id
    ):
        self.representation_id = representation_id
        self.loader_name = loader_name
        self.object_name = object_name
        self.namespace = namespace
        self.name = name
        self.item_id = item_id

    @classmethod
    def from_container_data(cls, container):
        return cls(
            representation_id=container["representation"],
            loader_name=container["loader"],
            namespace=container["namespace"],
            name=container["name"],
            object_name=container["objectName"],
            item_id=uuid.uuid4().hex,
        )


class RepresentationInfo:
    def __init__(
        self,
        folder_id,
        folder_path,
        product_id,
        product_name,
        product_type,
        product_group,
        version_id,
        representation_name,
    ):
        self.folder_id = folder_id
        self.folder_path = folder_path
        self.product_id = product_id
        self.product_name = product_name
        self.product_type = product_type
        self.product_group = product_group
        self.version_id = version_id
        self.representation_name = representation_name
        self._is_valid = None

    @property
    def is_valid(self):
        if self._is_valid is None:
            self._is_valid = (
                self.folder_id is not None
                and self.product_id is not None
                and self.version_id is not None
                and self.representation_name is not None
            )
        return self._is_valid

    @classmethod
    def new_invalid(cls):
        return cls(None, None, None, None, None, None, None, None)


class VersionItem:
    def __init__(self, version_id, product_id, version, status, is_latest):
        self.version = version
        self.version_id = version_id
        self.product_id = product_id
        self.version = version
        self.status = status
        self.is_latest = is_latest

    @property
    def is_hero(self):
        return self.version < 0

    @classmethod
    def from_entity(cls, version_entity, is_latest):
        return cls(
            version_id=version_entity["id"],
            product_id=version_entity["productId"],
            version=version_entity["version"],
            status=version_entity["status"],
            is_latest=is_latest,
        )
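A short sketch of how VersionItem.from_entity maps a version entity onto the item; the entity dict below is a made-up stand-in for a real ayon_api response:

# Hand-written stand-in entity, illustrative only.
hero_entity = {
    "id": "aaaa",
    "productId": "bbbb",
    "version": -3,
    "status": "Approved",
}
item = VersionItem.from_entity(hero_entity, is_latest=False)
print(item.is_hero)    # True, negative versions are hero versions
print(item.is_latest)  # False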
class ContainersModel:
|
||||
def __init__(self, controller):
|
||||
self._controller = controller
|
||||
self._items_cache = None
|
||||
self._containers_by_id = {}
|
||||
self._container_items_by_id = {}
|
||||
self._version_items_by_product_id = {}
|
||||
self._repre_info_by_id = {}
|
||||
|
||||
def reset(self):
|
||||
self._items_cache = None
|
||||
self._containers_by_id = {}
|
||||
self._container_items_by_id = {}
|
||||
self._version_items_by_product_id = {}
|
||||
self._repre_info_by_id = {}
|
||||
|
||||
def get_containers(self):
|
||||
self._update_cache()
|
||||
return list(self._containers_by_id.values())
|
||||
|
||||
def get_containers_by_item_ids(self, item_ids):
|
||||
return {
|
||||
item_id: self._containers_by_id.get(item_id)
|
||||
for item_id in item_ids
|
||||
}
|
||||
|
||||
def get_container_items(self):
|
||||
self._update_cache()
|
||||
return list(self._items_cache)
|
||||
|
||||
def get_container_items_by_id(self, item_ids):
|
||||
return {
|
||||
item_id: self._container_items_by_id.get(item_id)
|
||||
for item_id in item_ids
|
||||
}
|
||||
|
||||
def get_representation_info_items(self, representation_ids):
|
||||
output = {}
|
||||
missing_repre_ids = set()
|
||||
for repre_id in representation_ids:
|
||||
try:
|
||||
uuid.UUID(repre_id)
|
||||
except ValueError:
|
||||
output[repre_id] = RepresentationInfo.new_invalid()
|
||||
continue
|
||||
|
||||
repre_info = self._repre_info_by_id.get(repre_id)
|
||||
if repre_info is None:
|
||||
missing_repre_ids.add(repre_id)
|
||||
else:
|
||||
output[repre_id] = repre_info
|
||||
|
||||
if not missing_repre_ids:
|
||||
return output
|
||||
|
||||
project_name = self._controller.get_current_project_name()
|
||||
repre_hierarchy_by_id = get_representations_hierarchy(
|
||||
project_name, missing_repre_ids
|
||||
)
|
||||
for repre_id, repre_hierarchy in repre_hierarchy_by_id.items():
|
||||
kwargs = {
|
||||
"folder_id": None,
|
||||
"folder_path": None,
|
||||
"product_id": None,
|
||||
"product_name": None,
|
||||
"product_type": None,
|
||||
"product_group": None,
|
||||
"version_id": None,
|
||||
"representation_name": None,
|
||||
}
|
||||
folder = repre_hierarchy.folder
|
||||
product = repre_hierarchy.product
|
||||
version = repre_hierarchy.version
|
||||
repre = repre_hierarchy.representation
|
||||
if folder:
|
||||
kwargs["folder_id"] = folder["id"]
|
||||
kwargs["folder_path"] = folder["path"]
|
||||
if product:
|
||||
group = product["attrib"]["productGroup"]
|
||||
kwargs["product_id"] = product["id"]
|
||||
kwargs["product_name"] = product["name"]
|
||||
kwargs["product_type"] = product["productType"]
|
||||
kwargs["product_group"] = group
|
||||
if version:
|
||||
kwargs["version_id"] = version["id"]
|
||||
if repre:
|
||||
kwargs["representation_name"] = repre["name"]
|
||||
|
||||
repre_info = RepresentationInfo(**kwargs)
|
||||
self._repre_info_by_id[repre_id] = repre_info
|
||||
output[repre_id] = repre_info
|
||||
return output
|
||||
|
||||
def get_version_items(self, product_ids):
|
||||
if not product_ids:
|
||||
return {}
|
||||
|
||||
missing_ids = {
|
||||
product_id
|
||||
for product_id in product_ids
|
||||
if product_id not in self._version_items_by_product_id
|
||||
}
|
||||
if missing_ids:
|
||||
def version_sorted(entity):
|
||||
return entity["version"]
|
||||
|
||||
project_name = self._controller.get_current_project_name()
|
||||
version_entities_by_product_id = {
|
||||
product_id: []
|
||||
for product_id in missing_ids
|
||||
}
|
||||
|
||||
version_entities = list(ayon_api.get_versions(
|
||||
project_name,
|
||||
product_ids=missing_ids,
|
||||
fields={"id", "version", "productId", "status"}
|
||||
))
|
||||
version_entities.sort(key=version_sorted)
|
||||
for version_entity in version_entities:
|
||||
product_id = version_entity["productId"]
|
||||
version_entities_by_product_id[product_id].append(
|
||||
version_entity
|
||||
)
|
||||
|
||||
for product_id, version_entities in (
|
||||
version_entities_by_product_id.items()
|
||||
):
|
||||
last_version = abs(version_entities[-1]["version"])
|
||||
version_items_by_id = {
|
||||
entity["id"]: VersionItem.from_entity(
|
||||
entity, abs(entity["version"]) == last_version
|
||||
)
|
||||
for entity in version_entities
|
||||
}
|
||||
self._version_items_by_product_id[product_id] = (
|
||||
version_items_by_id
|
||||
)
|
||||
|
||||
return {
|
||||
product_id: dict(self._version_items_by_product_id[product_id])
|
||||
for product_id in product_ids
|
||||
}
|
||||
|
||||
def _update_cache(self):
|
||||
if self._items_cache is not None:
|
||||
return
|
||||
|
||||
host = self._controller.get_host()
|
||||
if isinstance(host, ILoadHost):
|
||||
containers = list(host.get_containers())
|
||||
elif hasattr(host, "ls"):
|
||||
containers = list(host.ls())
|
||||
else:
|
||||
containers = []
|
||||
container_items = []
|
||||
containers_by_id = {}
|
||||
container_items_by_id = {}
|
||||
for container in containers:
|
||||
item = ContainerItem.from_container_data(container)
|
||||
containers_by_id[item.item_id] = container
|
||||
container_items_by_id[item.item_id] = item
|
||||
container_items.append(item)
|
||||
|
||||
self._containers_by_id = containers_by_id
|
||||
self._container_items_by_id = container_items_by_id
|
||||
self._items_cache = container_items
|
||||
client/ayon_core/tools/sceneinventory/select_version_dialog.py (new file, 216 lines)

@@ -0,0 +1,216 @@
import uuid

from qtpy import QtWidgets, QtCore, QtGui

from ayon_core.tools.utils.delegates import StatusDelegate

from .model import (
    ITEM_ID_ROLE,
    STATUS_NAME_ROLE,
    STATUS_SHORT_ROLE,
    STATUS_COLOR_ROLE,
    STATUS_ICON_ROLE,
)


class VersionOption:
    def __init__(
        self,
        version,
        label,
        status_name,
        status_short,
        status_color
    ):
        self.version = version
        self.label = label
        self.status_name = status_name
        self.status_short = status_short
        self.status_color = status_color


class SelectVersionModel(QtGui.QStandardItemModel):
    def data(self, index, role=None):
        if role is None:
            role = QtCore.Qt.DisplayRole

        index = self.index(index.row(), 0, index.parent())
        return super().data(index, role)

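Further down in this file, SelectVersionDialog.ask_for_version wraps the combobox and dialog into one call. A hedged usage sketch; the status names, short codes and colors are placeholder values and a running QApplication is assumed:

# Placeholder version options, illustrative only.
options = [
    VersionOption(1, "v001", "In progress", "PRG", "#e8770e"),
    VersionOption(2, "v002", "Approved", "APP", "#24be61"),
]
selected = SelectVersionDialog.ask_for_version(options, index=1)
if selected is not None:
    print("Picked version:", selected.version)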
class SelectVersionComboBox(QtWidgets.QComboBox):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
combo_model = SelectVersionModel(0, 2)
|
||||
|
||||
self.setModel(combo_model)
|
||||
|
||||
combo_view = QtWidgets.QTreeView(self)
|
||||
combo_view.setHeaderHidden(True)
|
||||
combo_view.setIndentation(0)
|
||||
|
||||
self.setView(combo_view)
|
||||
|
||||
header = combo_view.header()
|
||||
header.setSectionResizeMode(0, QtWidgets.QHeaderView.ResizeToContents)
|
||||
header.setSectionResizeMode(1, QtWidgets.QHeaderView.Stretch)
|
||||
|
||||
status_delegate = StatusDelegate(
|
||||
STATUS_NAME_ROLE,
|
||||
STATUS_SHORT_ROLE,
|
||||
STATUS_COLOR_ROLE,
|
||||
STATUS_ICON_ROLE,
|
||||
)
|
||||
combo_view.setItemDelegateForColumn(1, status_delegate)
|
||||
|
||||
self._combo_model = combo_model
|
||||
self._combo_view = combo_view
|
||||
self._status_delegate = status_delegate
|
||||
self._items_by_id = {}
|
||||
|
||||
def paintEvent(self, event):
|
||||
painter = QtWidgets.QStylePainter(self)
|
||||
option = QtWidgets.QStyleOptionComboBox()
|
||||
self.initStyleOption(option)
|
||||
painter.drawComplexControl(QtWidgets.QStyle.CC_ComboBox, option)
|
||||
idx = self.currentIndex()
|
||||
status_name = self.itemData(idx, STATUS_NAME_ROLE)
|
||||
if status_name is None:
|
||||
painter.drawControl(QtWidgets.QStyle.CE_ComboBoxLabel, option)
|
||||
return
|
||||
|
||||
painter.save()
|
||||
text_field_rect = self.style().subControlRect(
|
||||
QtWidgets.QStyle.CC_ComboBox,
|
||||
option,
|
||||
QtWidgets.QStyle.SC_ComboBoxEditField
|
||||
)
|
||||
adj_rect = text_field_rect.adjusted(1, 0, -1, 0)
|
||||
painter.drawText(
|
||||
adj_rect,
|
||||
QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
|
||||
option.currentText
|
||||
)
|
||||
metrics = QtGui.QFontMetrics(self.font())
|
||||
text_width = metrics.width(option.currentText)
|
||||
x_offset = text_width + 2
|
||||
diff_width = adj_rect.width() - x_offset
|
||||
if diff_width <= 0:
|
||||
return
|
||||
|
||||
status_rect = adj_rect.adjusted(x_offset + 2, 0, 0, 0)
|
||||
if diff_width < metrics.width(status_name):
|
||||
status_name = self.itemData(idx, STATUS_SHORT_ROLE)
|
||||
|
||||
color = QtGui.QColor(self.itemData(idx, STATUS_COLOR_ROLE))
|
||||
|
||||
pen = painter.pen()
|
||||
pen.setColor(color)
|
||||
painter.setPen(pen)
|
||||
painter.drawText(
|
||||
status_rect,
|
||||
QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
|
||||
status_name
|
||||
)
|
||||
|
||||
def set_current_index(self, index):
|
||||
model = self._combo_view.model()
|
||||
if index > model.rowCount():
|
||||
return
|
||||
|
||||
self.setCurrentIndex(index)
|
||||
|
||||
def get_item_by_id(self, item_id):
|
||||
return self._items_by_id[item_id]
|
||||
|
||||
def set_versions(self, version_options):
|
||||
self._items_by_id = {}
|
||||
model = self._combo_model
|
||||
root_item = model.invisibleRootItem()
|
||||
root_item.removeRows(0, root_item.rowCount())
|
||||
|
||||
new_items = []
|
||||
for version_option in version_options:
|
||||
item_id = uuid.uuid4().hex
|
||||
item = QtGui.QStandardItem(version_option.label)
|
||||
item.setColumnCount(root_item.columnCount())
|
||||
item.setData(
|
||||
version_option.status_name, STATUS_NAME_ROLE
|
||||
)
|
||||
item.setData(
|
||||
version_option.status_short, STATUS_SHORT_ROLE
|
||||
)
|
||||
item.setData(
|
||||
version_option.status_color, STATUS_COLOR_ROLE
|
||||
)
|
||||
item.setData(item_id, ITEM_ID_ROLE)
|
||||
|
||||
new_items.append(item)
|
||||
self._items_by_id[item_id] = version_option
|
||||
|
||||
if new_items:
|
||||
root_item.appendRows(new_items)
|
||||
|
||||
|
||||
class SelectVersionDialog(QtWidgets.QDialog):
|
||||
def __init__(self, parent=None):
|
||||
super().__init__(parent=parent)
|
||||
|
||||
self.setWindowTitle("Select version")
|
||||
|
||||
label_widget = QtWidgets.QLabel("Set version number to", self)
|
||||
versions_combobox = SelectVersionComboBox(self)
|
||||
|
||||
btns_widget = QtWidgets.QWidget(self)
|
||||
|
||||
confirm_btn = QtWidgets.QPushButton("OK", btns_widget)
|
||||
cancel_btn = QtWidgets.QPushButton("Cancel", btns_widget)
|
||||
|
||||
btns_layout = QtWidgets.QHBoxLayout(btns_widget)
|
||||
btns_layout.setContentsMargins(0, 0, 0, 0)
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(confirm_btn, 0)
|
||||
btns_layout.addWidget(cancel_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(label_widget, 0)
|
||||
main_layout.addWidget(versions_combobox, 0)
|
||||
main_layout.addWidget(btns_widget, 0)
|
||||
|
||||
confirm_btn.clicked.connect(self._on_confirm)
|
||||
cancel_btn.clicked.connect(self._on_cancel)
|
||||
|
||||
self._selected_item = None
|
||||
self._cancelled = False
|
||||
self._versions_combobox = versions_combobox
|
||||
|
||||
def get_selected_item(self):
|
||||
if self._cancelled:
|
||||
return None
|
||||
return self._selected_item
|
||||
|
||||
def set_versions(self, version_options):
|
||||
self._versions_combobox.set_versions(version_options)
|
||||
|
||||
def select_index(self, index):
|
||||
self._versions_combobox.set_current_index(index)
|
||||
|
||||
@classmethod
|
||||
def ask_for_version(cls, version_options, index=None, parent=None):
|
||||
dialog = cls(parent)
|
||||
dialog.set_versions(version_options)
|
||||
if index is not None:
|
||||
dialog.select_index(index)
|
||||
dialog.exec_()
|
||||
return dialog.get_selected_item()
|
||||
|
||||
def _on_confirm(self):
|
||||
self._cancelled = False
|
||||
index = self._versions_combobox.currentIndex()
|
||||
item_id = self._versions_combobox.itemData(index, ITEM_ID_ROLE)
|
||||
self._selected_item = self._versions_combobox.get_item_by_id(item_id)
|
||||
self.accept()
|
||||
|
||||
def _on_cancel(self):
|
||||
self._cancelled = True
|
||||
self.reject()
|
||||
File diff suppressed because it is too large
@@ -2,17 +2,10 @@ from qtpy import QtWidgets, QtCore, QtGui
import qtawesome

from ayon_core import style, resources
from ayon_core.tools.utils.lib import (
    preserve_expanded_rows,
    preserve_selection,
)
from ayon_core.tools.utils import PlaceholderLineEdit

from ayon_core.tools.sceneinventory import SceneInventoryController

from .delegates import VersionDelegate
from .model import (
    InventoryModel,
    FilterProxyModel
)
from .view import SceneInventoryView


@@ -20,7 +13,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
    """Scene Inventory window"""

    def __init__(self, controller=None, parent=None):
        super(SceneInventoryWindow, self).__init__(parent)
        super().__init__(parent)

        if controller is None:
            controller = SceneInventoryController()
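A minimal sketch of opening the window from a host, assuming a QApplication already exists (as it does inside DCC integrations):

# Illustrative only; not part of the diff.
controller = SceneInventoryController()
window = SceneInventoryWindow(controller=controller)
window.show()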
@ -33,10 +26,9 @@ class SceneInventoryWindow(QtWidgets.QDialog):
|
|||
|
||||
self.resize(1100, 480)
|
||||
|
||||
# region control
|
||||
|
||||
filter_label = QtWidgets.QLabel("Search", self)
|
||||
text_filter = QtWidgets.QLineEdit(self)
|
||||
text_filter = PlaceholderLineEdit(self)
|
||||
text_filter.setPlaceholderText("Filter by name...")
|
||||
|
||||
outdated_only_checkbox = QtWidgets.QCheckBox(
|
||||
"Filter to outdated", self
|
||||
|
|
@ -44,52 +36,30 @@ class SceneInventoryWindow(QtWidgets.QDialog):
|
|||
outdated_only_checkbox.setToolTip("Show outdated files only")
|
||||
outdated_only_checkbox.setChecked(False)
|
||||
|
||||
icon = qtawesome.icon("fa.arrow-up", color="white")
|
||||
update_all_icon = qtawesome.icon("fa.arrow-up", color="white")
|
||||
update_all_button = QtWidgets.QPushButton(self)
|
||||
update_all_button.setToolTip("Update all outdated to latest version")
|
||||
update_all_button.setIcon(icon)
|
||||
update_all_button.setIcon(update_all_icon)
|
||||
|
||||
icon = qtawesome.icon("fa.refresh", color="white")
|
||||
refresh_icon = qtawesome.icon("fa.refresh", color="white")
|
||||
refresh_button = QtWidgets.QPushButton(self)
|
||||
refresh_button.setToolTip("Refresh")
|
||||
refresh_button.setIcon(icon)
|
||||
refresh_button.setIcon(refresh_icon)
|
||||
|
||||
control_layout = QtWidgets.QHBoxLayout()
|
||||
control_layout.addWidget(filter_label)
|
||||
control_layout.addWidget(text_filter)
|
||||
control_layout.addWidget(outdated_only_checkbox)
|
||||
control_layout.addWidget(update_all_button)
|
||||
        control_layout.addWidget(refresh_button)

        model = InventoryModel(controller)
        proxy = FilterProxyModel()
        proxy.setSourceModel(model)
        proxy.setDynamicSortFilter(True)
        proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        headers_widget = QtWidgets.QWidget(self)
        headers_layout = QtWidgets.QHBoxLayout(headers_widget)
        headers_layout.setContentsMargins(0, 0, 0, 0)
        headers_layout.addWidget(filter_label, 0)
        headers_layout.addWidget(text_filter, 1)
        headers_layout.addWidget(outdated_only_checkbox, 0)
        headers_layout.addWidget(update_all_button, 0)
        headers_layout.addWidget(refresh_button, 0)

        view = SceneInventoryView(controller, self)
        view.setModel(proxy)

        sync_enabled = controller.is_sitesync_enabled()
        view.setColumnHidden(model.active_site_col, not sync_enabled)
        view.setColumnHidden(model.remote_site_col, not sync_enabled)

        # set some nice default widths for the view
        view.setColumnWidth(0, 250)  # name
        view.setColumnWidth(1, 55)   # version
        view.setColumnWidth(2, 55)   # count
        view.setColumnWidth(3, 150)  # product type
        view.setColumnWidth(4, 120)  # group
        view.setColumnWidth(5, 150)  # loader

        # apply delegates
        version_delegate = VersionDelegate(controller, self)
        column = model.Columns.index("version")
        view.setItemDelegateForColumn(column, version_delegate)

        layout = QtWidgets.QVBoxLayout(self)
        layout.addLayout(control_layout)
        layout.addWidget(view)
        main_layout = QtWidgets.QVBoxLayout(self)
        main_layout.addWidget(headers_widget, 0)
        main_layout.addWidget(view, 1)

        show_timer = QtCore.QTimer()
        show_timer.setInterval(0)

@@ -114,12 +84,8 @@ class SceneInventoryWindow(QtWidgets.QDialog):
        self._update_all_button = update_all_button
        self._outdated_only_checkbox = outdated_only_checkbox
        self._view = view
        self._model = model
        self._proxy = proxy
        self._version_delegate = version_delegate

        self._first_show = True
        self._first_refresh = True

    def showEvent(self, event):
        super(SceneInventoryWindow, self).showEvent(event)

@@ -139,29 +105,16 @@ class SceneInventoryWindow(QtWidgets.QDialog):
        whilst trying to name an instance.

        """
        pass

    def _on_refresh_request(self):
        """Signal callback to trigger 'refresh' without any arguments."""

        self.refresh()

    def refresh(self, containers=None):
        self._first_refresh = False
    def refresh(self):
        self._controller.reset()
        with preserve_expanded_rows(
            tree_view=self._view,
            role=self._model.UniqueRole
        ):
            with preserve_selection(
                tree_view=self._view,
                role=self._model.UniqueRole,
                current_index=False
            ):
                kwargs = {"containers": containers}
                # TODO do not touch view's inner attribute
                if self._view._hierarchy_view:
                    kwargs["selected"] = self._view._selected
                self._model.refresh(**kwargs)
                self._view.refresh()

    def _on_show_timer(self):
        if self._show_counter < 3:

@@ -171,17 +124,13 @@ class SceneInventoryWindow(QtWidgets.QDialog):
            self.refresh()

    def _on_hierarchy_view_change(self, enabled):
        self._proxy.set_hierarchy_view(enabled)
        self._model.set_hierarchy_view(enabled)
        self._view.set_hierarchy_view(enabled)

    def _on_text_filter_change(self, text_filter):
        if hasattr(self._proxy, "setFilterRegExp"):
            self._proxy.setFilterRegExp(text_filter)
        else:
            self._proxy.setFilterRegularExpression(text_filter)
        self._view.set_text_filter(text_filter)

    def _on_outdated_state_change(self):
        self._proxy.set_filter_outdated(
        self._view.set_filter_outdated(
            self._outdated_only_checkbox.isChecked()
        )

@@ -0,0 +1,5 @@
from .broker import StdOutBroker

__all__ = (
    "StdOutBroker",
)

@@ -1,173 +1,12 @@
import os
import sys
import threading
import collections
import websocket
import json
from datetime import datetime
import warnings
from .broker import StdOutBroker

from ayon_core.lib import Logger
from openpype_modules.webserver.host_console_listener import MsgAction
warnings.warn(
    (
        "Import of 'StdOutBroker' from 'ayon_core.tools.stdout_broker.app'"
        " is deprecated. Please use 'ayon_core.tools.stdout_broker' instead."
    ),
    DeprecationWarning
)

log = Logger.get_logger(__name__)


class StdOutBroker:
    """
    Application showing console in Services tray for non python hosts
    instead of cmd window.
    """
    MAX_LINES = 10000
    TIMER_TIMEOUT = 0.200

    def __init__(self, host_name):
        self.host_name = host_name
        self.webserver_client = None

        self.original_stdout_write = None
        self.original_stderr_write = None
        self.log_queue = collections.deque()

        date_str = datetime.now().strftime("%d%m%Y%H%M%S")
        self.host_id = "{}_{}".format(self.host_name, date_str)

        self._std_available = False
        self._is_running = False
        self._catch_std_outputs()

        self._timer = None

    @property
    def send_to_tray(self):
        """Checks if connected to tray and have access to logs."""
        return self.webserver_client and self._std_available

    def start(self):
        """Start app, create and start timer"""
        if not self._std_available or self._is_running:
            return
        self._is_running = True
        self._create_timer()
        self._connect_to_tray()

    def stop(self):
        """Disconnect from Tray, process last logs"""
        if not self._is_running:
            return
        self._is_running = False
        self._process_queue()
        self._disconnect_from_tray()

    def host_connected(self):
        """Send to Tray console that host is ready - icon change. """
        log.info("Host {} connected".format(self.host_id))

        payload = {
            "host": self.host_id,
            "action": MsgAction.INITIALIZED,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }
        self._send(payload)

    def _create_timer(self):
        timer = threading.Timer(self.TIMER_TIMEOUT, self._timer_callback)
        timer.start()
        self._timer = timer

    def _timer_callback(self):
        if not self._is_running:
            return
        self._process_queue()
        self._create_timer()

    def _connect_to_tray(self):
        """Connect to Tray webserver to pass console output. """
        if not self._std_available:  # not content to log
            return
        ws = websocket.WebSocket()
        webserver_url = os.environ.get("AYON_WEBSERVER_URL")

        if not webserver_url:
            print("Unknown webserver url, cannot connect to pass log")
            return

        webserver_url = webserver_url.replace("http", "ws")
        ws.connect("{}/ws/host_listener".format(webserver_url))
        self.webserver_client = ws

        payload = {
            "host": self.host_id,
            "action": MsgAction.CONNECTING,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }
        self._send(payload)

    def _disconnect_from_tray(self):
        """Send to Tray that host is closing - remove from Services. """
        print("Host {} closing".format(self.host_name))
        if not self.webserver_client:
            return

        payload = {
            "host": self.host_id,
            "action": MsgAction.CLOSE,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }

        self._send(payload)
        self.webserver_client.close()

    def _catch_std_outputs(self):
        """Redirects standard out and error to own functions"""
        if sys.stdout:
            self.original_stdout_write = sys.stdout.write
            sys.stdout.write = self._my_stdout_write
            self._std_available = True

        if sys.stderr:
            self.original_stderr_write = sys.stderr.write
            sys.stderr.write = self._my_stderr_write
            self._std_available = True

    def _my_stdout_write(self, text):
        """Appends outputted text to queue, keep writing to original stdout"""
        if self.original_stdout_write is not None:
            self.original_stdout_write(text)
        if self.send_to_tray:
            self.log_queue.append(text)

    def _my_stderr_write(self, text):
        """Appends outputted text to queue, keep writing to original stderr"""
        if self.original_stderr_write is not None:
            self.original_stderr_write(text)
        if self.send_to_tray:
            self.log_queue.append(text)

    def _process_queue(self):
        """Sends lines and purges queue"""
        if not self.send_to_tray:
            return

        lines = tuple(self.log_queue)
        self.log_queue.clear()
        if lines:
            payload = {
                "host": self.host_id,
                "action": MsgAction.ADD,
                "text": "\n".join(lines)
            }

            self._send(payload)

    def _send(self, payload):
        """Worker method to send to existing websocket connection."""
        if not self.send_to_tray:
            return

        try:
            self.webserver_client.send(json.dumps(payload))
        except ConnectionResetError:  # Tray closed
            self._connect_to_tray()
__all__ = ("StdOutBroker", )

174 client/ayon_core/tools/stdout_broker/broker.py Normal file

@@ -0,0 +1,174 @@
import os
import sys
import threading
import collections
import json
from datetime import datetime

import websocket

from ayon_core.lib import Logger
from ayon_core.modules.webserver import HostMsgAction

log = Logger.get_logger(__name__)


class StdOutBroker:
    """
    Application showing console in Services tray for non python hosts
    instead of cmd window.
    """
    MAX_LINES = 10000
    TIMER_TIMEOUT = 0.200

    def __init__(self, host_name):
        self.host_name = host_name
        self.webserver_client = None

        self.original_stdout_write = None
        self.original_stderr_write = None
        self.log_queue = collections.deque()

        date_str = datetime.now().strftime("%d%m%Y%H%M%S")
        self.host_id = "{}_{}".format(self.host_name, date_str)

        self._std_available = False
        self._is_running = False
        self._catch_std_outputs()

        self._timer = None

    @property
    def send_to_tray(self):
        """Checks if connected to tray and have access to logs."""
        return self.webserver_client and self._std_available

    def start(self):
        """Start app, create and start timer"""
        if not self._std_available or self._is_running:
            return
        self._is_running = True
        self._create_timer()
        self._connect_to_tray()

    def stop(self):
        """Disconnect from Tray, process last logs"""
        if not self._is_running:
            return
        self._is_running = False
        self._process_queue()
        self._disconnect_from_tray()

    def host_connected(self):
        """Send to Tray console that host is ready - icon change. """
        log.info("Host {} connected".format(self.host_id))

        payload = {
            "host": self.host_id,
            "action": HostMsgAction.INITIALIZED,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }
        self._send(payload)

    def _create_timer(self):
        timer = threading.Timer(self.TIMER_TIMEOUT, self._timer_callback)
        timer.start()
        self._timer = timer

    def _timer_callback(self):
        if not self._is_running:
            return
        self._process_queue()
        self._create_timer()

    def _connect_to_tray(self):
        """Connect to Tray webserver to pass console output. """
        if not self._std_available:  # not content to log
            return
        ws = websocket.WebSocket()
        webserver_url = os.environ.get("AYON_WEBSERVER_URL")

        if not webserver_url:
            print("Unknown webserver url, cannot connect to pass log")
            return

        webserver_url = webserver_url.replace("http", "ws")
        ws.connect("{}/ws/host_listener".format(webserver_url))
        self.webserver_client = ws

        payload = {
            "host": self.host_id,
            "action": HostMsgAction.CONNECTING,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }
        self._send(payload)

    def _disconnect_from_tray(self):
        """Send to Tray that host is closing - remove from Services. """
        print("Host {} closing".format(self.host_name))
        if not self.webserver_client:
            return

        payload = {
            "host": self.host_id,
            "action": HostMsgAction.CLOSE,
            "text": "Integration with {}".format(
                str.capitalize(self.host_name))
        }

        self._send(payload)
        self.webserver_client.close()

    def _catch_std_outputs(self):
        """Redirects standard out and error to own functions"""
        if sys.stdout:
            self.original_stdout_write = sys.stdout.write
            sys.stdout.write = self._my_stdout_write
            self._std_available = True

        if sys.stderr:
            self.original_stderr_write = sys.stderr.write
            sys.stderr.write = self._my_stderr_write
            self._std_available = True

    def _my_stdout_write(self, text):
        """Appends outputted text to queue, keep writing to original stdout"""
        if self.original_stdout_write is not None:
            self.original_stdout_write(text)
        if self.send_to_tray:
            self.log_queue.append(text)

    def _my_stderr_write(self, text):
        """Appends outputted text to queue, keep writing to original stderr"""
        if self.original_stderr_write is not None:
            self.original_stderr_write(text)
        if self.send_to_tray:
            self.log_queue.append(text)

    def _process_queue(self):
        """Sends lines and purges queue"""
        if not self.send_to_tray:
            return

        lines = tuple(self.log_queue)
        self.log_queue.clear()
        if lines:
            payload = {
                "host": self.host_id,
                "action": HostMsgAction.ADD,
                "text": "\n".join(lines)
            }

            self._send(payload)

    def _send(self, payload):
        """Worker method to send to existing websocket connection."""
        if not self.send_to_tray:
            return

        try:
            self.webserver_client.send(json.dumps(payload))
        except ConnectionResetError:  # Tray closed
            self._connect_to_tray()

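# --- Editor's note (not part of the diff): a minimal usage sketch of the
# StdOutBroker added above. The host name "photoshop" and the surrounding flow
# are illustrative assumptions; output is only forwarded when the AYON tray
# webserver is running and AYON_WEBSERVER_URL is set in the environment.
from ayon_core.tools.stdout_broker import StdOutBroker

broker = StdOutBroker("photoshop")  # constructor redirects sys.stdout/sys.stderr writes
broker.start()                      # starts the flush timer and connects to the tray webserver
broker.host_connected()             # tells the tray the host finished its startup
try:
    print("This line is mirrored to the AYON tray console.")
finally:
    broker.stop()                   # flushes queued lines and closes the websocket
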
@@ -447,8 +447,10 @@ class SystemTrayIcon(QtWidgets.QSystemTrayIcon):

    def initialize_addons(self):
        self._initializing_addons = True
        self.tray_man.initialize_addons()
        self._initializing_addons = False
        try:
            self.tray_man.initialize_addons()
        finally:
            self._initializing_addons = False

    def _click_timer_timeout(self):
        self._click_timer.stop()

@@ -2,7 +2,7 @@ import time
from datetime import datetime
import logging

from qtpy import QtWidgets
from qtpy import QtWidgets, QtGui

log = logging.getLogger(__name__)

@@ -106,3 +106,80 @@ class PrettyTimeDelegate(QtWidgets.QStyledItemDelegate):
    def displayText(self, value, locale):
        if value is not None:
            return pretty_timestamp(value)


class StatusDelegate(QtWidgets.QStyledItemDelegate):
    """Delegate showing status name and short name."""
    def __init__(
        self,
        status_name_role,
        status_short_name_role,
        status_color_role,
        status_icon_role,
        *args, **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.status_name_role = status_name_role
        self.status_short_name_role = status_short_name_role
        self.status_color_role = status_color_role
        self.status_icon_role = status_icon_role

    def paint(self, painter, option, index):
        if option.widget:
            style = option.widget.style()
        else:
            style = QtWidgets.QApplication.style()

        style.drawControl(
            QtWidgets.QCommonStyle.CE_ItemViewItem,
            option,
            painter,
            option.widget
        )

        painter.save()

        text_rect = style.subElementRect(
            QtWidgets.QCommonStyle.SE_ItemViewItemText,
            option
        )
        text_margin = style.proxy().pixelMetric(
            QtWidgets.QCommonStyle.PM_FocusFrameHMargin,
            option,
            option.widget
        ) + 1
        padded_text_rect = text_rect.adjusted(
            text_margin, 0, - text_margin, 0
        )

        fm = QtGui.QFontMetrics(option.font)
        text = self._get_status_name(index)
        if padded_text_rect.width() < fm.width(text):
            text = self._get_status_short_name(index)

        fg_color = self._get_status_color(index)
        pen = painter.pen()
        pen.setColor(fg_color)
        painter.setPen(pen)

        painter.drawText(
            padded_text_rect,
            option.displayAlignment,
            text
        )

        painter.restore()

    def _get_status_name(self, index):
        return index.data(self.status_name_role)

    def _get_status_short_name(self, index):
        return index.data(self.status_short_name_role)

    def _get_status_color(self, index):
        return QtGui.QColor(index.data(self.status_color_role))

    def _get_status_icon(self, index):
        if self.status_icon_role is not None:
            return index.data(self.status_icon_role)
        return None

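# --- Editor's note (not part of the diff): a hedged sketch of wiring the new
# StatusDelegate to a view column. The role constants and the column index are
# illustrative assumptions, not names taken from ayon-core.
from qtpy import QtCore, QtWidgets

STATUS_NAME_ROLE = QtCore.Qt.UserRole + 10
STATUS_SHORT_NAME_ROLE = QtCore.Qt.UserRole + 11
STATUS_COLOR_ROLE = QtCore.Qt.UserRole + 12
STATUS_ICON_ROLE = QtCore.Qt.UserRole + 13

app = QtWidgets.QApplication([])
view = QtWidgets.QTreeView()
status_delegate = StatusDelegate(
    STATUS_NAME_ROLE,
    STATUS_SHORT_NAME_ROLE,
    STATUS_COLOR_ROLE,
    STATUS_ICON_ROLE,
    view,
)
# The delegate paints the full status name and falls back to the short name
# when the padded text rectangle is narrower than the rendered text.
view.setItemDelegateForColumn(1, status_delegate)
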
@@ -370,6 +370,8 @@ class FoldersWidget(QtWidgets.QWidget):
        """

        self._folders_proxy_model.setFilterFixedString(name)
        if name:
            self._folders_view.expandAll()

    def refresh(self):
        """Refresh folders model.

@@ -1,6 +1,7 @@
import os
import sys
import contextlib
import collections
from functools import partial

from qtpy import QtWidgets, QtCore, QtGui

@@ -196,16 +197,16 @@ def get_openpype_qt_app():
    return get_ayon_qt_app()


def iter_model_rows(model, column, include_root=False):
def iter_model_rows(model, column=0, include_root=False):
    """Iterate over all row indices in a model"""
    indices = [QtCore.QModelIndex()]  # start iteration at root

    for index in indices:
    indexes_queue = collections.deque()
    # start iteration at root
    indexes_queue.append(QtCore.QModelIndex())
    while indexes_queue:
        index = indexes_queue.popleft()
        # Add children to the iterations
        child_rows = model.rowCount(index)
        for child_row in range(child_rows):
            child_index = model.index(child_row, column, index)
            indices.append(child_index)
        for child_row in range(model.rowCount(index)):
            indexes_queue.append(model.index(child_row, column, index))

        if not include_root and not index.isValid():
            continue

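# --- Editor's note (not part of the diff): a small usage sketch of the
# rewritten iter_model_rows(), which now walks the model breadth-first with a
# deque instead of appending to the list it iterates, and defaults "column" to 0.
from qtpy import QtCore, QtGui, QtWidgets

app = QtWidgets.QApplication([])
model = QtGui.QStandardItemModel()
root_item = QtGui.QStandardItem("root")
root_item.appendRow(QtGui.QStandardItem("child"))
model.appendRow(root_item)

for index in iter_model_rows(model):
    print(index.data(QtCore.Qt.DisplayRole))  # prints "root", then "child"
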
@@ -13,8 +13,10 @@ class WorkfileInfo:
        task_id (str): Task id.
        filepath (str): Filepath.
        filesize (int): File size.
        creation_time (int): Creation time (timestamp).
        modification_time (int): Modification time (timestamp).
        creation_time (float): Creation time (timestamp).
        modification_time (float): Modification time (timestamp).
        created_by (Union[str, none]): User who created the file.
        updated_by (Union[str, none]): User who last updated the file.
        note (str): Note.
    """

@@ -26,6 +28,8 @@ class WorkfileInfo:
        filesize,
        creation_time,
        modification_time,
        created_by,
        updated_by,
        note,
    ):
        self.folder_id = folder_id

@@ -34,6 +38,8 @@ class WorkfileInfo:
        self.filesize = filesize
        self.creation_time = creation_time
        self.modification_time = modification_time
        self.created_by = created_by
        self.updated_by = updated_by
        self.note = note

    def to_data(self):

@@ -50,6 +56,8 @@ class WorkfileInfo:
            "filesize": self.filesize,
            "creation_time": self.creation_time,
            "modification_time": self.modification_time,
            "created_by": self.created_by,
            "updated_by": self.updated_by,
            "note": self.note,
        }

@@ -212,6 +220,7 @@ class FileItem:
        dirpath (str): Directory path of file.
        filename (str): Filename.
        modified (float): Modified timestamp.
        created_by (Optional[str]): Username.
        representation_id (Optional[str]): Representation id of published
            workfile.
        filepath (Optional[str]): Prepared filepath.

@@ -223,6 +232,8 @@ class FileItem:
        dirpath,
        filename,
        modified,
        created_by=None,
        updated_by=None,
        representation_id=None,
        filepath=None,
        exists=None

@@ -230,6 +241,8 @@ class FileItem:
        self.filename = filename
        self.dirpath = dirpath
        self.modified = modified
        self.created_by = created_by
        self.updated_by = updated_by
        self.representation_id = representation_id
        self._filepath = filepath
        self._exists = exists

@@ -269,6 +282,7 @@ class FileItem:
            "filename": self.filename,
            "dirpath": self.dirpath,
            "modified": self.modified,
            "created_by": self.created_by,
            "representation_id": self.representation_id,
            "filepath": self.filepath,
            "exists": self.exists,

@@ -522,6 +536,16 @@ class AbstractWorkfilesFrontend(AbstractWorkfilesCommon):

        pass

    @abstractmethod
    def get_user_items_by_name(self):
        """Get user items available on AYON server.

        Returns:
            Dict[str, UserItem]: User items by username.

        """
        pass

    # Host information
    @abstractmethod
    def get_workfile_extensions(self):

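# --- Editor's note (not part of the diff): a hedged sketch of the extended
# FileItem signature after this change; the paths, names and timestamp are
# illustrative values only.
import time

file_item = FileItem(
    "/projects/demo/work/sh010",    # dirpath
    "sh010_compositing_v012.nk",    # filename
    time.time(),                    # modified timestamp
    created_by="artist01",
    updated_by="artist02",
)
print(file_item.created_by, file_item.updated_by)
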
@@ -19,6 +19,7 @@ from ayon_core.tools.common_models import (
    HierarchyModel,
    HierarchyExpectedSelection,
    ProjectsModel,
    UsersModel,
)

from .abstract import (

@@ -161,6 +162,7 @@ class BaseWorkfileController(
        self._save_is_enabled = True

        # Expected selected folder and task
        self._users_model = self._create_users_model()
        self._expected_selection = self._create_expected_selection_obj()
        self._selection_model = self._create_selection_model()
        self._projects_model = self._create_projects_model()

@@ -176,6 +178,12 @@ class BaseWorkfileController(
    def is_host_valid(self):
        return self._host_is_valid

    def _create_users_model(self):
        return UsersModel(self)

    def _create_workfiles_model(self):
        return WorkfilesModel(self)

    def _create_expected_selection_obj(self):
        return WorkfilesToolExpectedSelection(self)

@@ -188,9 +196,6 @@ class BaseWorkfileController(
    def _create_hierarchy_model(self):
        return HierarchyModel(self)

    def _create_workfiles_model(self):
        return WorkfilesModel(self)

    @property
    def event_system(self):
        """Inner event system for workfiles tool controller.

@@ -272,6 +277,9 @@ class BaseWorkfileController(
            {"enabled": enabled}
        )

    def get_user_items_by_name(self):
        return self._users_model.get_user_items_by_name()

    # Host information
    def get_workfile_extensions(self):
        host = self._host

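# --- Editor's note (not part of the diff): a hedged sketch of how the widgets
# in this change resolve a username to a display name through the controller's
# new get_user_items_by_name(); "controller" is assumed to be a
# BaseWorkfileController instance.
def resolve_display_name(controller, username):
    # get_user_items_by_name() returns {username: UserItem}
    user_item = controller.get_user_items_by_name().get(username)
    if user_item is not None and user_item.full_name:
        return user_item.full_name
    return username
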
|
|
@ -6,6 +6,7 @@ import arrow
|
|||
import ayon_api
|
||||
from ayon_api.operations import OperationsSession
|
||||
|
||||
from ayon_core.lib import get_ayon_username
|
||||
from ayon_core.pipeline.template_data import (
|
||||
get_template_data,
|
||||
get_task_template_data,
|
||||
|
|
@ -23,6 +24,8 @@ from ayon_core.tools.workfiles.abstract import (
|
|||
WorkfileInfo,
|
||||
)
|
||||
|
||||
_NOT_SET = object()
|
||||
|
||||
|
||||
class CommentMatcher(object):
|
||||
"""Use anatomy and work file data to parse comments from filenames.
|
||||
|
|
@ -188,10 +191,17 @@ class WorkareaModel:
|
|||
if ext not in self._extensions:
|
||||
continue
|
||||
|
||||
modified = os.path.getmtime(filepath)
|
||||
items.append(
|
||||
FileItem(workdir, filename, modified)
|
||||
workfile_info = self._controller.get_workfile_info(
|
||||
folder_id, task_id, filepath
|
||||
)
|
||||
modified = os.path.getmtime(filepath)
|
||||
items.append(FileItem(
|
||||
workdir,
|
||||
filename,
|
||||
modified,
|
||||
workfile_info.created_by,
|
||||
workfile_info.updated_by,
|
||||
))
|
||||
return items
|
||||
|
||||
def _get_template_key(self, fill_data):
|
||||
|
|
@ -439,6 +449,7 @@ class WorkfileEntitiesModel:
|
|||
self._controller = controller
|
||||
self._cache = {}
|
||||
self._items = {}
|
||||
self._current_username = _NOT_SET
|
||||
|
||||
def _get_workfile_info_identifier(
|
||||
self, folder_id, task_id, rootless_path
|
||||
|
|
@ -459,8 +470,12 @@ class WorkfileEntitiesModel:
|
|||
self, folder_id, task_id, workfile_info, filepath
|
||||
):
|
||||
note = ""
|
||||
created_by = None
|
||||
updated_by = None
|
||||
if workfile_info:
|
||||
note = workfile_info["attrib"].get("description") or ""
|
||||
created_by = workfile_info.get("createdBy")
|
||||
updated_by = workfile_info.get("updatedBy")
|
||||
|
||||
filestat = os.stat(filepath)
|
||||
return WorkfileInfo(
|
||||
|
|
@ -470,6 +485,8 @@ class WorkfileEntitiesModel:
|
|||
filesize=filestat.st_size,
|
||||
creation_time=filestat.st_ctime,
|
||||
modification_time=filestat.st_mtime,
|
||||
created_by=created_by,
|
||||
updated_by=updated_by,
|
||||
note=note
|
||||
)
|
||||
|
||||
|
|
@ -481,7 +498,7 @@ class WorkfileEntitiesModel:
|
|||
for workfile_info in ayon_api.get_workfiles_info(
|
||||
self._controller.get_current_project_name(),
|
||||
task_ids=[task_id],
|
||||
fields=["id", "path", "attrib"],
|
||||
fields=["id", "path", "attrib", "createdBy", "updatedBy"],
|
||||
):
|
||||
workfile_identifier = self._get_workfile_info_identifier(
|
||||
folder_id, task_id, workfile_info["path"]
|
||||
|
|
@ -525,18 +542,32 @@ class WorkfileEntitiesModel:
|
|||
self._items.pop(identifier, None)
|
||||
return
|
||||
|
||||
if note is None:
|
||||
return
|
||||
|
||||
old_note = workfile_info.get("attrib", {}).get("note")
|
||||
|
||||
new_workfile_info = copy.deepcopy(workfile_info)
|
||||
attrib = new_workfile_info.setdefault("attrib", {})
|
||||
attrib["description"] = note
|
||||
update_data = {}
|
||||
if note is not None and old_note != note:
|
||||
update_data["attrib"] = {"description": note}
|
||||
attrib = new_workfile_info.setdefault("attrib", {})
|
||||
attrib["description"] = note
|
||||
|
||||
username = self._get_current_username()
|
||||
# Automatically fix 'createdBy' and 'updatedBy' fields
|
||||
# NOTE both fields were not automatically filled by server
|
||||
# until 1.1.3 release.
|
||||
if workfile_info.get("createdBy") is None:
|
||||
update_data["createdBy"] = username
|
||||
new_workfile_info["createdBy"] = username
|
||||
|
||||
if workfile_info.get("updatedBy") != username:
|
||||
update_data["updatedBy"] = username
|
||||
new_workfile_info["updatedBy"] = username
|
||||
|
||||
if not update_data:
|
||||
return
|
||||
|
||||
self._cache[identifier] = new_workfile_info
|
||||
self._items.pop(identifier, None)
|
||||
if old_note == note:
|
||||
return
|
||||
|
||||
project_name = self._controller.get_current_project_name()
|
||||
|
||||
|
|
@ -545,7 +576,7 @@ class WorkfileEntitiesModel:
|
|||
project_name,
|
||||
"workfile",
|
||||
workfile_info["id"],
|
||||
{"attrib": {"description": note}},
|
||||
update_data,
|
||||
)
|
||||
session.commit()
|
||||
|
||||
|
|
@ -554,13 +585,18 @@ class WorkfileEntitiesModel:
|
|||
|
||||
project_name = self._controller.get_current_project_name()
|
||||
|
||||
username = self._get_current_username()
|
||||
workfile_info = {
|
||||
"path": rootless_path,
|
||||
"taskId": task_id,
|
||||
"attrib": {
|
||||
"extension": extension,
|
||||
"description": note
|
||||
}
|
||||
},
|
||||
# TODO remove 'createdBy' and 'updatedBy' fields when server is
|
||||
# or above 1.1.3 .
|
||||
"createdBy": username,
|
||||
"updatedBy": username,
|
||||
}
|
||||
|
||||
session = OperationsSession()
|
||||
|
|
@ -568,6 +604,11 @@ class WorkfileEntitiesModel:
|
|||
session.commit()
|
||||
return workfile_info
|
||||
|
||||
def _get_current_username(self):
|
||||
if self._current_username is _NOT_SET:
|
||||
self._current_username = get_ayon_username()
|
||||
return self._current_username
|
||||
|
||||
|
||||
class PublishWorkfilesModel:
|
||||
"""Model for handling of published workfiles.
|
||||
|
|
@ -599,7 +640,7 @@ class PublishWorkfilesModel:
|
|||
return self._cached_repre_extensions
|
||||
|
||||
def _file_item_from_representation(
|
||||
self, repre_entity, project_anatomy, task_name=None
|
||||
self, repre_entity, project_anatomy, author, task_name=None
|
||||
):
|
||||
if task_name is not None:
|
||||
task_info = repre_entity["context"].get("task")
|
||||
|
|
@ -634,6 +675,8 @@ class PublishWorkfilesModel:
|
|||
dirpath,
|
||||
filename,
|
||||
created_at.float_timestamp,
|
||||
author,
|
||||
None,
|
||||
repre_entity["id"]
|
||||
)
|
||||
|
||||
|
|
@ -643,9 +686,9 @@ class PublishWorkfilesModel:
|
|||
# Get subset docs of folder
|
||||
product_entities = ayon_api.get_products(
|
||||
project_name,
|
||||
folder_ids=[folder_id],
|
||||
product_types=["workfile"],
|
||||
fields=["id", "name"]
|
||||
folder_ids={folder_id},
|
||||
product_types={"workfile"},
|
||||
fields={"id", "name"}
|
||||
)
|
||||
|
||||
output = []
|
||||
|
|
@ -657,25 +700,33 @@ class PublishWorkfilesModel:
|
|||
version_entities = ayon_api.get_versions(
|
||||
project_name,
|
||||
product_ids=product_ids,
|
||||
fields=["id", "productId"]
|
||||
fields={"id", "author"}
|
||||
)
|
||||
version_ids = {version["id"] for version in version_entities}
|
||||
if not version_ids:
|
||||
versions_by_id = {
|
||||
version["id"]: version
|
||||
for version in version_entities
|
||||
}
|
||||
if not versions_by_id:
|
||||
return output
|
||||
|
||||
# Query representations of filtered versions and add filter for
|
||||
# extension
|
||||
repre_entities = ayon_api.get_representations(
|
||||
project_name,
|
||||
version_ids=version_ids
|
||||
version_ids=set(versions_by_id)
|
||||
)
|
||||
project_anatomy = self._controller.project_anatomy
|
||||
|
||||
# Filter queried representations by task name if task is set
|
||||
file_items = []
|
||||
for repre_entity in repre_entities:
|
||||
version_id = repre_entity["versionId"]
|
||||
version_entity = versions_by_id[version_id]
|
||||
file_item = self._file_item_from_representation(
|
||||
repre_entity, project_anatomy, task_name
|
||||
repre_entity,
|
||||
project_anatomy,
|
||||
version_entity["author"],
|
||||
task_name,
|
||||
)
|
||||
if file_item is not None:
|
||||
file_items.append(file_item)
|
||||
|
|
|
|||
|
|
@ -13,7 +13,8 @@ from .utils import BaseOverlayFrame
|
|||
|
||||
REPRE_ID_ROLE = QtCore.Qt.UserRole + 1
|
||||
FILEPATH_ROLE = QtCore.Qt.UserRole + 2
|
||||
DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 3
|
||||
AUTHOR_ROLE = QtCore.Qt.UserRole + 3
|
||||
DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 4
|
||||
|
||||
|
||||
class PublishedFilesModel(QtGui.QStandardItemModel):
|
||||
|
|
@ -23,13 +24,19 @@ class PublishedFilesModel(QtGui.QStandardItemModel):
|
|||
controller (AbstractWorkfilesFrontend): The control object.
|
||||
"""
|
||||
|
||||
columns = [
|
||||
"Name",
|
||||
"Author",
|
||||
"Date Modified",
|
||||
]
|
||||
date_modified_col = columns.index("Date Modified")
|
||||
|
||||
def __init__(self, controller):
|
||||
super(PublishedFilesModel, self).__init__()
|
||||
|
||||
self.setColumnCount(2)
|
||||
|
||||
self.setHeaderData(0, QtCore.Qt.Horizontal, "Name")
|
||||
self.setHeaderData(1, QtCore.Qt.Horizontal, "Date Modified")
|
||||
self.setColumnCount(len(self.columns))
|
||||
for idx, label in enumerate(self.columns):
|
||||
self.setHeaderData(idx, QtCore.Qt.Horizontal, label)
|
||||
|
||||
controller.register_event_callback(
|
||||
"selection.task.changed",
|
||||
|
|
@ -185,6 +192,8 @@ class PublishedFilesModel(QtGui.QStandardItemModel):
|
|||
self._remove_empty_item()
|
||||
self._remove_missing_context_item()
|
||||
|
||||
user_items_by_name = self._controller.get_user_items_by_name()
|
||||
|
||||
items_to_remove = set(self._items_by_id.keys())
|
||||
new_items = []
|
||||
for file_item in file_items:
|
||||
|
|
@ -205,8 +214,15 @@ class PublishedFilesModel(QtGui.QStandardItemModel):
|
|||
else:
|
||||
flags = QtCore.Qt.NoItemFlags
|
||||
|
||||
author = file_item.created_by
|
||||
user_item = user_items_by_name.get(author)
|
||||
if user_item is not None and user_item.full_name:
|
||||
author = user_item.full_name
|
||||
|
||||
item.setFlags(flags)
|
||||
|
||||
item.setData(file_item.filepath, FILEPATH_ROLE)
|
||||
item.setData(author, AUTHOR_ROLE)
|
||||
item.setData(file_item.modified, DATE_MODIFIED_ROLE)
|
||||
|
||||
self._items_by_id[repre_id] = item
|
||||
|
|
@ -225,22 +241,30 @@ class PublishedFilesModel(QtGui.QStandardItemModel):
|
|||
# Use flags of first column for all columns
|
||||
if index.column() != 0:
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
return super(PublishedFilesModel, self).flags(index)
|
||||
return super().flags(index)
|
||||
|
||||
def data(self, index, role=None):
|
||||
if role is None:
|
||||
role = QtCore.Qt.DisplayRole
|
||||
|
||||
# Handle roles for first column
|
||||
if index.column() == 1:
|
||||
if role == QtCore.Qt.DecorationRole:
|
||||
return None
|
||||
col = index.column()
|
||||
if col != 1:
|
||||
return super().data(index, role)
|
||||
|
||||
if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
|
||||
if role == QtCore.Qt.DecorationRole:
|
||||
return None
|
||||
|
||||
if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
|
||||
if col == 1:
|
||||
role = AUTHOR_ROLE
|
||||
elif col == 2:
|
||||
role = DATE_MODIFIED_ROLE
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
else:
|
||||
return None
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
|
||||
return super(PublishedFilesModel, self).data(index, role)
|
||||
return super().data(index, role)
|
||||
|
||||
|
||||
class SelectContextOverlay(BaseOverlayFrame):
|
||||
|
|
@ -295,7 +319,7 @@ class PublishedFilesWidget(QtWidgets.QWidget):
|
|||
view.setModel(proxy_model)
|
||||
|
||||
time_delegate = PrettyTimeDelegate()
|
||||
view.setItemDelegateForColumn(1, time_delegate)
|
||||
view.setItemDelegateForColumn(model.date_modified_col, time_delegate)
|
||||
|
||||
# Default to a wider first filename column it is what we mostly care
|
||||
# about and the date modified is relatively small anyway.
|
||||
|
|
|
|||
|
|
@ -10,7 +10,8 @@ from ayon_core.tools.utils.delegates import PrettyTimeDelegate
|
|||
|
||||
FILENAME_ROLE = QtCore.Qt.UserRole + 1
|
||||
FILEPATH_ROLE = QtCore.Qt.UserRole + 2
|
||||
DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 3
|
||||
AUTHOR_ROLE = QtCore.Qt.UserRole + 3
|
||||
DATE_MODIFIED_ROLE = QtCore.Qt.UserRole + 4
|
||||
|
||||
|
||||
class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
||||
|
|
@ -21,14 +22,20 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
"""
|
||||
|
||||
refreshed = QtCore.Signal()
|
||||
columns = [
|
||||
"Name",
|
||||
"Author",
|
||||
"Date Modified",
|
||||
]
|
||||
date_modified_col = columns.index("Date Modified")
|
||||
|
||||
def __init__(self, controller):
|
||||
super(WorkAreaFilesModel, self).__init__()
|
||||
|
||||
self.setColumnCount(2)
|
||||
self.setColumnCount(len(self.columns))
|
||||
|
||||
self.setHeaderData(0, QtCore.Qt.Horizontal, "Name")
|
||||
self.setHeaderData(1, QtCore.Qt.Horizontal, "Date Modified")
|
||||
for idx, label in enumerate(self.columns):
|
||||
self.setHeaderData(idx, QtCore.Qt.Horizontal, label)
|
||||
|
||||
controller.register_event_callback(
|
||||
"selection.folder.changed",
|
||||
|
|
@ -186,6 +193,7 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
return
|
||||
self._remove_empty_item()
|
||||
self._remove_missing_context_item()
|
||||
user_items_by_name = self._controller.get_user_items_by_name()
|
||||
|
||||
items_to_remove = set(self._items_by_filename.keys())
|
||||
new_items = []
|
||||
|
|
@ -205,7 +213,13 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
item.setData(file_item.filename, QtCore.Qt.DisplayRole)
|
||||
item.setData(file_item.filename, FILENAME_ROLE)
|
||||
|
||||
updated_by = file_item.updated_by
|
||||
user_item = user_items_by_name.get(updated_by)
|
||||
if user_item is not None and user_item.full_name:
|
||||
updated_by = user_item.full_name
|
||||
|
||||
item.setData(file_item.filepath, FILEPATH_ROLE)
|
||||
item.setData(updated_by, AUTHOR_ROLE)
|
||||
item.setData(file_item.modified, DATE_MODIFIED_ROLE)
|
||||
|
||||
self._items_by_filename[file_item.filename] = item
|
||||
|
|
@ -224,22 +238,30 @@ class WorkAreaFilesModel(QtGui.QStandardItemModel):
|
|||
# Use flags of first column for all columns
|
||||
if index.column() != 0:
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
return super(WorkAreaFilesModel, self).flags(index)
|
||||
return super().flags(index)
|
||||
|
||||
def data(self, index, role=None):
|
||||
if role is None:
|
||||
role = QtCore.Qt.DisplayRole
|
||||
|
||||
# Handle roles for first column
|
||||
if index.column() == 1:
|
||||
if role == QtCore.Qt.DecorationRole:
|
||||
return None
|
||||
col = index.column()
|
||||
if col == 0:
|
||||
return super().data(index, role)
|
||||
|
||||
if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
|
||||
if role == QtCore.Qt.DecorationRole:
|
||||
return None
|
||||
|
||||
if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole):
|
||||
if col == 1:
|
||||
role = AUTHOR_ROLE
|
||||
elif col == 2:
|
||||
role = DATE_MODIFIED_ROLE
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
else:
|
||||
return None
|
||||
index = self.index(index.row(), 0, index.parent())
|
||||
|
||||
return super(WorkAreaFilesModel, self).data(index, role)
|
||||
return super().data(index, role)
|
||||
|
||||
def set_published_mode(self, published_mode):
|
||||
if self._published_mode == published_mode:
|
||||
|
|
@ -279,7 +301,7 @@ class WorkAreaFilesWidget(QtWidgets.QWidget):
|
|||
view.setModel(proxy_model)
|
||||
|
||||
time_delegate = PrettyTimeDelegate()
|
||||
view.setItemDelegateForColumn(1, time_delegate)
|
||||
view.setItemDelegateForColumn(model.date_modified_col, time_delegate)
|
||||
|
||||
# Default to a wider first filename column it is what we mostly care
|
||||
# about and the date modified is relatively small anyway.
|
||||
|
|
|
|||
|
|
@ -147,13 +147,38 @@ class SidePanelWidget(QtWidgets.QWidget):
|
|||
workfile_info.creation_time)
|
||||
modification_time = datetime.datetime.fromtimestamp(
|
||||
workfile_info.modification_time)
|
||||
|
||||
user_items_by_name = self._controller.get_user_items_by_name()
|
||||
|
||||
def convert_username(username):
|
||||
user_item = user_items_by_name.get(username)
|
||||
if user_item is not None and user_item.full_name:
|
||||
return user_item.full_name
|
||||
return username
|
||||
|
||||
created_lines = [
|
||||
creation_time.strftime(datetime_format)
|
||||
]
|
||||
if workfile_info.created_by:
|
||||
created_lines.insert(
|
||||
0, convert_username(workfile_info.created_by)
|
||||
)
|
||||
|
||||
modified_lines = [
|
||||
modification_time.strftime(datetime_format)
|
||||
]
|
||||
if workfile_info.updated_by:
|
||||
modified_lines.insert(
|
||||
0, convert_username(workfile_info.updated_by)
|
||||
)
|
||||
|
||||
lines = (
|
||||
"<b>Size:</b>",
|
||||
size_value,
|
||||
"<b>Created:</b>",
|
||||
creation_time.strftime(datetime_format),
|
||||
"<br/>".join(created_lines),
|
||||
"<b>Modified:</b>",
|
||||
modification_time.strftime(datetime_format)
|
||||
"<br/>".join(modified_lines),
|
||||
)
|
||||
self._orig_note = note
|
||||
self._note_input.setPlainText(note)
|
||||
|
|
|
|||
|
|
@ -107,7 +107,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
split_widget.addWidget(tasks_widget)
|
||||
split_widget.addWidget(col_3_widget)
|
||||
split_widget.addWidget(side_panel)
|
||||
split_widget.setSizes([255, 160, 455, 175])
|
||||
split_widget.setSizes([255, 175, 550, 190])
|
||||
|
||||
body_layout.addWidget(split_widget)
|
||||
|
||||
|
|
@ -169,7 +169,7 @@ class WorkfilesToolWindow(QtWidgets.QWidget):
|
|||
# Force focus on the open button by default, required for Houdini.
|
||||
self._files_widget.setFocus()
|
||||
|
||||
self.resize(1200, 600)
|
||||
self.resize(1260, 600)
|
||||
|
||||
def _create_col_1_widget(self, controller, parent):
|
||||
col_widget = QtWidgets.QWidget(parent)
|
||||
|
|
|
|||
|
|
@ -1,3 +0,0 @@
|
|||
## Info
|
||||
|
||||
Only **Python 2** specific modules are here.
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from ._version import __version__
|
||||
from .api import get, now, utcnow
|
||||
from .arrow import Arrow
|
||||
from .factory import ArrowFactory
|
||||
from .formatter import (
|
||||
FORMAT_ATOM,
|
||||
FORMAT_COOKIE,
|
||||
FORMAT_RFC822,
|
||||
FORMAT_RFC850,
|
||||
FORMAT_RFC1036,
|
||||
FORMAT_RFC1123,
|
||||
FORMAT_RFC2822,
|
||||
FORMAT_RFC3339,
|
||||
FORMAT_RSS,
|
||||
FORMAT_W3C,
|
||||
)
|
||||
from .parser import ParserError
|
||||
|
|
@ -1 +0,0 @@
|
|||
__version__ = "0.17.0"
|
||||
|
|
@ -1,54 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provides the default implementation of :class:`ArrowFactory <arrow.factory.ArrowFactory>`
|
||||
methods for use as a module API.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from arrow.factory import ArrowFactory
|
||||
|
||||
# internal default factory.
|
||||
_factory = ArrowFactory()
|
||||
|
||||
|
||||
def get(*args, **kwargs):
|
||||
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``get`` method."""
|
||||
|
||||
return _factory.get(*args, **kwargs)
|
||||
|
||||
|
||||
get.__doc__ = _factory.get.__doc__
|
||||
|
||||
|
||||
def utcnow():
|
||||
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``utcnow`` method."""
|
||||
|
||||
return _factory.utcnow()
|
||||
|
||||
|
||||
utcnow.__doc__ = _factory.utcnow.__doc__
|
||||
|
||||
|
||||
def now(tz=None):
|
||||
"""Calls the default :class:`ArrowFactory <arrow.factory.ArrowFactory>` ``now`` method."""
|
||||
|
||||
return _factory.now(tz)
|
||||
|
||||
|
||||
now.__doc__ = _factory.now.__doc__
|
||||
|
||||
|
||||
def factory(type):
|
||||
"""Returns an :class:`.ArrowFactory` for the specified :class:`Arrow <arrow.arrow.Arrow>`
|
||||
or derived type.
|
||||
|
||||
:param type: the type, :class:`Arrow <arrow.arrow.Arrow>` or derived.
|
||||
|
||||
"""
|
||||
|
||||
return ArrowFactory(type)
|
||||
|
||||
|
||||
__all__ = ["get", "utcnow", "now", "factory"]
|
||||
1584 client/ayon_core/vendor/python/python_2/arrow/arrow.py vendored (file diff suppressed because it is too large)
|
|
@ -1,9 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Output of time.mktime(datetime.max.timetuple()) on macOS
|
||||
# This value must be hardcoded for compatibility with Windows
|
||||
# Platform-independent max timestamps are hard to form
|
||||
# https://stackoverflow.com/q/46133223
|
||||
MAX_TIMESTAMP = 253402318799.0
|
||||
MAX_TIMESTAMP_MS = MAX_TIMESTAMP * 1000
|
||||
MAX_TIMESTAMP_US = MAX_TIMESTAMP * 1000000
|
||||
|
|
@ -1,301 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Implements the :class:`ArrowFactory <arrow.factory.ArrowFactory>` class,
|
||||
providing factory methods for common :class:`Arrow <arrow.arrow.Arrow>`
|
||||
construction scenarios.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import calendar
|
||||
from datetime import date, datetime
|
||||
from datetime import tzinfo as dt_tzinfo
|
||||
from time import struct_time
|
||||
|
||||
from dateutil import tz as dateutil_tz
|
||||
|
||||
from arrow import parser
|
||||
from arrow.arrow import Arrow
|
||||
from arrow.util import is_timestamp, iso_to_gregorian, isstr
|
||||
|
||||
|
||||
class ArrowFactory(object):
|
||||
"""A factory for generating :class:`Arrow <arrow.arrow.Arrow>` objects.
|
||||
|
||||
:param type: (optional) the :class:`Arrow <arrow.arrow.Arrow>`-based class to construct from.
|
||||
Defaults to :class:`Arrow <arrow.arrow.Arrow>`.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, type=Arrow):
|
||||
self.type = type
|
||||
|
||||
def get(self, *args, **kwargs):
|
||||
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object based on flexible inputs.
|
||||
|
||||
:param locale: (optional) a ``str`` specifying a locale for the parser. Defaults to 'en_us'.
|
||||
:param tzinfo: (optional) a :ref:`timezone expression <tz-expr>` or tzinfo object.
|
||||
Replaces the timezone unless using an input form that is explicitly UTC or specifies
|
||||
the timezone in a positional argument. Defaults to UTC.
|
||||
:param normalize_whitespace: (optional) a ``bool`` specifying whether or not to normalize
|
||||
redundant whitespace (spaces, tabs, and newlines) in a datetime string before parsing.
|
||||
Defaults to false.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
|
||||
**No inputs** to get current UTC time::
|
||||
|
||||
>>> arrow.get()
|
||||
<Arrow [2013-05-08T05:51:43.316458+00:00]>
|
||||
|
||||
**None** to also get current UTC time::
|
||||
|
||||
>>> arrow.get(None)
|
||||
<Arrow [2013-05-08T05:51:49.016458+00:00]>
|
||||
|
||||
**One** :class:`Arrow <arrow.arrow.Arrow>` object, to get a copy.
|
||||
|
||||
>>> arw = arrow.utcnow()
|
||||
>>> arrow.get(arw)
|
||||
<Arrow [2013-10-23T15:21:54.354846+00:00]>
|
||||
|
||||
**One** ``float`` or ``int``, convertible to a floating-point timestamp, to get
|
||||
that timestamp in UTC::
|
||||
|
||||
>>> arrow.get(1367992474.293378)
|
||||
<Arrow [2013-05-08T05:54:34.293378+00:00]>
|
||||
|
||||
>>> arrow.get(1367992474)
|
||||
<Arrow [2013-05-08T05:54:34+00:00]>
|
||||
|
||||
**One** ISO 8601-formatted ``str``, to parse it::
|
||||
|
||||
>>> arrow.get('2013-09-29T01:26:43.830580')
|
||||
<Arrow [2013-09-29T01:26:43.830580+00:00]>
|
||||
|
||||
**One** ISO 8601-formatted ``str``, in basic format, to parse it::
|
||||
|
||||
>>> arrow.get('20160413T133656.456289')
|
||||
<Arrow [2016-04-13T13:36:56.456289+00:00]>
|
||||
|
||||
**One** ``tzinfo``, to get the current time **converted** to that timezone::
|
||||
|
||||
>>> arrow.get(tz.tzlocal())
|
||||
<Arrow [2013-05-07T22:57:28.484717-07:00]>
|
||||
|
||||
**One** naive ``datetime``, to get that datetime in UTC::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5))
|
||||
<Arrow [2013-05-05T00:00:00+00:00]>
|
||||
|
||||
**One** aware ``datetime``, to get that datetime::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5, tzinfo=tz.tzlocal()))
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**One** naive ``date``, to get that date in UTC::
|
||||
|
||||
>>> arrow.get(date(2013, 5, 5))
|
||||
<Arrow [2013-05-05T00:00:00+00:00]>
|
||||
|
||||
**One** time.struct time::
|
||||
|
||||
>>> arrow.get(gmtime(0))
|
||||
<Arrow [1970-01-01T00:00:00+00:00]>
|
||||
|
||||
**One** iso calendar ``tuple``, to get that week date in UTC::
|
||||
|
||||
>>> arrow.get((2013, 18, 7))
|
||||
<Arrow [2013-05-05T00:00:00+00:00]>
|
||||
|
||||
**Two** arguments, a naive or aware ``datetime``, and a replacement
|
||||
:ref:`timezone expression <tz-expr>`::
|
||||
|
||||
>>> arrow.get(datetime(2013, 5, 5), 'US/Pacific')
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**Two** arguments, a naive ``date``, and a replacement
|
||||
:ref:`timezone expression <tz-expr>`::
|
||||
|
||||
>>> arrow.get(date(2013, 5, 5), 'US/Pacific')
|
||||
<Arrow [2013-05-05T00:00:00-07:00]>
|
||||
|
||||
**Two** arguments, both ``str``, to parse the first according to the format of the second::
|
||||
|
||||
>>> arrow.get('2013-05-05 12:30:45 America/Chicago', 'YYYY-MM-DD HH:mm:ss ZZZ')
|
||||
<Arrow [2013-05-05T12:30:45-05:00]>
|
||||
|
||||
**Two** arguments, first a ``str`` to parse and second a ``list`` of formats to try::
|
||||
|
||||
>>> arrow.get('2013-05-05 12:30:45', ['MM/DD/YYYY', 'YYYY-MM-DD HH:mm:ss'])
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
**Three or more** arguments, as for the constructor of a ``datetime``::
|
||||
|
||||
>>> arrow.get(2013, 5, 5, 12, 30, 45)
|
||||
<Arrow [2013-05-05T12:30:45+00:00]>
|
||||
|
||||
"""
|
||||
|
||||
arg_count = len(args)
|
||||
locale = kwargs.pop("locale", "en_us")
|
||||
tz = kwargs.get("tzinfo", None)
|
||||
normalize_whitespace = kwargs.pop("normalize_whitespace", False)
|
||||
|
||||
# if kwargs given, send to constructor unless only tzinfo provided
|
||||
if len(kwargs) > 1:
|
||||
arg_count = 3
|
||||
|
||||
# tzinfo kwarg is not provided
|
||||
if len(kwargs) == 1 and tz is None:
|
||||
arg_count = 3
|
||||
|
||||
# () -> now, @ utc.
|
||||
if arg_count == 0:
|
||||
if isstr(tz):
|
||||
tz = parser.TzinfoParser.parse(tz)
|
||||
return self.type.now(tz)
|
||||
|
||||
if isinstance(tz, dt_tzinfo):
|
||||
return self.type.now(tz)
|
||||
|
||||
return self.type.utcnow()
|
||||
|
||||
if arg_count == 1:
|
||||
arg = args[0]
|
||||
|
||||
# (None) -> now, @ utc.
|
||||
if arg is None:
|
||||
return self.type.utcnow()
|
||||
|
||||
# try (int, float) -> from timestamp with tz
|
||||
elif not isstr(arg) and is_timestamp(arg):
|
||||
if tz is None:
|
||||
# set to UTC by default
|
||||
tz = dateutil_tz.tzutc()
|
||||
return self.type.fromtimestamp(arg, tzinfo=tz)
|
||||
|
||||
# (Arrow) -> from the object's datetime.
|
||||
elif isinstance(arg, Arrow):
|
||||
return self.type.fromdatetime(arg.datetime)
|
||||
|
||||
# (datetime) -> from datetime.
|
||||
elif isinstance(arg, datetime):
|
||||
return self.type.fromdatetime(arg)
|
||||
|
||||
# (date) -> from date.
|
||||
elif isinstance(arg, date):
|
||||
return self.type.fromdate(arg)
|
||||
|
||||
# (tzinfo) -> now, @ tzinfo.
|
||||
elif isinstance(arg, dt_tzinfo):
|
||||
return self.type.now(arg)
|
||||
|
||||
# (str) -> parse.
|
||||
elif isstr(arg):
|
||||
dt = parser.DateTimeParser(locale).parse_iso(arg, normalize_whitespace)
|
||||
return self.type.fromdatetime(dt, tz)
|
||||
|
||||
# (struct_time) -> from struct_time
|
||||
elif isinstance(arg, struct_time):
|
||||
return self.type.utcfromtimestamp(calendar.timegm(arg))
|
||||
|
||||
# (iso calendar) -> convert then from date
|
||||
elif isinstance(arg, tuple) and len(arg) == 3:
|
||||
dt = iso_to_gregorian(*arg)
|
||||
return self.type.fromdate(dt)
|
||||
|
||||
else:
|
||||
raise TypeError(
|
||||
"Can't parse single argument of type '{}'".format(type(arg))
|
||||
)
|
||||
|
||||
elif arg_count == 2:
|
||||
|
||||
arg_1, arg_2 = args[0], args[1]
|
||||
|
||||
if isinstance(arg_1, datetime):
|
||||
|
||||
# (datetime, tzinfo/str) -> fromdatetime replace tzinfo.
|
||||
if isinstance(arg_2, dt_tzinfo) or isstr(arg_2):
|
||||
return self.type.fromdatetime(arg_1, arg_2)
|
||||
else:
|
||||
raise TypeError(
|
||||
"Can't parse two arguments of types 'datetime', '{}'".format(
|
||||
type(arg_2)
|
||||
)
|
||||
)
|
||||
|
||||
elif isinstance(arg_1, date):
|
||||
|
||||
# (date, tzinfo/str) -> fromdate replace tzinfo.
|
||||
if isinstance(arg_2, dt_tzinfo) or isstr(arg_2):
|
||||
return self.type.fromdate(arg_1, tzinfo=arg_2)
|
||||
else:
|
||||
raise TypeError(
|
||||
"Can't parse two arguments of types 'date', '{}'".format(
|
||||
type(arg_2)
|
||||
)
|
||||
)
|
||||
|
||||
# (str, format) -> parse.
|
||||
elif isstr(arg_1) and (isstr(arg_2) or isinstance(arg_2, list)):
|
||||
dt = parser.DateTimeParser(locale).parse(
|
||||
args[0], args[1], normalize_whitespace
|
||||
)
|
||||
return self.type.fromdatetime(dt, tzinfo=tz)
|
||||
|
||||
else:
|
||||
raise TypeError(
|
||||
"Can't parse two arguments of types '{}' and '{}'".format(
|
||||
type(arg_1), type(arg_2)
|
||||
)
|
||||
)
|
||||
|
||||
# 3+ args -> datetime-like via constructor.
|
||||
else:
|
||||
return self.type(*args, **kwargs)
|
||||
|
||||
def utcnow(self):
|
||||
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in UTC time.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
>>> arrow.utcnow()
|
||||
<Arrow [2013-05-08T05:19:07.018993+00:00]>
|
||||
"""
|
||||
|
||||
return self.type.utcnow()
|
||||
|
||||
def now(self, tz=None):
|
||||
"""Returns an :class:`Arrow <arrow.arrow.Arrow>` object, representing "now" in the given
|
||||
timezone.
|
||||
|
||||
:param tz: (optional) A :ref:`timezone expression <tz-expr>`. Defaults to local time.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import arrow
|
||||
>>> arrow.now()
|
||||
<Arrow [2013-05-07T22:19:11.363410-07:00]>
|
||||
|
||||
>>> arrow.now('US/Pacific')
|
||||
<Arrow [2013-05-07T22:19:15.251821-07:00]>
|
||||
|
||||
>>> arrow.now('+02:00')
|
||||
<Arrow [2013-05-08T07:19:25.618646+02:00]>
|
||||
|
||||
>>> arrow.now('local')
|
||||
<Arrow [2013-05-07T22:19:39.130059-07:00]>
|
||||
"""
|
||||
|
||||
if tz is None:
|
||||
tz = dateutil_tz.tzlocal()
|
||||
elif not isinstance(tz, dt_tzinfo):
|
||||
tz = parser.TzinfoParser.parse(tz)
|
||||
|
||||
return self.type.now(tz)
|
||||
|
|
@ -1,139 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, division
|
||||
|
||||
import calendar
|
||||
import re
|
||||
|
||||
from dateutil import tz as dateutil_tz
|
||||
|
||||
from arrow import locales, util
|
||||
|
||||
FORMAT_ATOM = "YYYY-MM-DD HH:mm:ssZZ"
|
||||
FORMAT_COOKIE = "dddd, DD-MMM-YYYY HH:mm:ss ZZZ"
|
||||
FORMAT_RFC822 = "ddd, DD MMM YY HH:mm:ss Z"
|
||||
FORMAT_RFC850 = "dddd, DD-MMM-YY HH:mm:ss ZZZ"
|
||||
FORMAT_RFC1036 = "ddd, DD MMM YY HH:mm:ss Z"
|
||||
FORMAT_RFC1123 = "ddd, DD MMM YYYY HH:mm:ss Z"
|
||||
FORMAT_RFC2822 = "ddd, DD MMM YYYY HH:mm:ss Z"
|
||||
FORMAT_RFC3339 = "YYYY-MM-DD HH:mm:ssZZ"
|
||||
FORMAT_RSS = "ddd, DD MMM YYYY HH:mm:ss Z"
|
||||
FORMAT_W3C = "YYYY-MM-DD HH:mm:ssZZ"
|
||||
|
||||
|
||||
class DateTimeFormatter(object):
|
||||
|
||||
# This pattern matches characters enclosed in square brackets are matched as
|
||||
# an atomic group. For more info on atomic groups and how to they are
|
||||
# emulated in Python's re library, see https://stackoverflow.com/a/13577411/2701578
|
||||
|
||||
_FORMAT_RE = re.compile(
|
||||
r"(\[(?:(?=(?P<literal>[^]]))(?P=literal))*\]|YYY?Y?|MM?M?M?|Do|DD?D?D?|d?dd?d?|HH?|hh?|mm?|ss?|SS?S?S?S?S?|ZZ?Z?|a|A|X|x|W)"
|
||||
)
|
||||
|
||||
def __init__(self, locale="en_us"):
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
|
||||
def format(cls, dt, fmt):
|
||||
|
||||
return cls._FORMAT_RE.sub(lambda m: cls._format_token(dt, m.group(0)), fmt)
|
||||
|
||||
def _format_token(self, dt, token):
|
||||
|
||||
if token and token.startswith("[") and token.endswith("]"):
|
||||
return token[1:-1]
|
||||
|
||||
if token == "YYYY":
|
||||
return self.locale.year_full(dt.year)
|
||||
if token == "YY":
|
||||
return self.locale.year_abbreviation(dt.year)
|
||||
|
||||
if token == "MMMM":
|
||||
return self.locale.month_name(dt.month)
|
||||
if token == "MMM":
|
||||
return self.locale.month_abbreviation(dt.month)
|
||||
if token == "MM":
|
||||
return "{:02d}".format(dt.month)
|
||||
if token == "M":
|
||||
return str(dt.month)
|
||||
|
||||
if token == "DDDD":
|
||||
return "{:03d}".format(dt.timetuple().tm_yday)
|
||||
if token == "DDD":
|
||||
return str(dt.timetuple().tm_yday)
|
||||
if token == "DD":
|
||||
return "{:02d}".format(dt.day)
|
||||
if token == "D":
|
||||
return str(dt.day)
|
||||
|
||||
if token == "Do":
|
||||
return self.locale.ordinal_number(dt.day)
|
||||
|
||||
if token == "dddd":
|
||||
return self.locale.day_name(dt.isoweekday())
|
||||
if token == "ddd":
|
||||
return self.locale.day_abbreviation(dt.isoweekday())
|
||||
if token == "d":
|
||||
return str(dt.isoweekday())
|
||||
|
||||
if token == "HH":
|
||||
return "{:02d}".format(dt.hour)
|
||||
if token == "H":
|
||||
return str(dt.hour)
|
||||
if token == "hh":
|
||||
return "{:02d}".format(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
|
||||
if token == "h":
|
||||
return str(dt.hour if 0 < dt.hour < 13 else abs(dt.hour - 12))
|
||||
|
||||
if token == "mm":
|
||||
return "{:02d}".format(dt.minute)
|
||||
if token == "m":
|
||||
return str(dt.minute)
|
||||
|
||||
if token == "ss":
|
||||
return "{:02d}".format(dt.second)
|
||||
if token == "s":
|
||||
return str(dt.second)
|
||||
|
||||
if token == "SSSSSS":
|
||||
return str("{:06d}".format(int(dt.microsecond)))
|
||||
if token == "SSSSS":
|
||||
return str("{:05d}".format(int(dt.microsecond / 10)))
|
||||
if token == "SSSS":
|
||||
return str("{:04d}".format(int(dt.microsecond / 100)))
|
||||
if token == "SSS":
|
||||
return str("{:03d}".format(int(dt.microsecond / 1000)))
|
||||
if token == "SS":
|
||||
return str("{:02d}".format(int(dt.microsecond / 10000)))
|
||||
if token == "S":
|
||||
return str(int(dt.microsecond / 100000))
|
||||
|
||||
if token == "X":
|
||||
# TODO: replace with a call to dt.timestamp() when we drop Python 2.7
|
||||
return str(calendar.timegm(dt.utctimetuple()))
|
||||
|
||||
if token == "x":
|
||||
# TODO: replace with a call to dt.timestamp() when we drop Python 2.7
|
||||
ts = calendar.timegm(dt.utctimetuple()) + (dt.microsecond / 1000000)
|
||||
return str(int(ts * 1000000))
|
||||
|
||||
if token == "ZZZ":
|
||||
return dt.tzname()
|
||||
|
||||
if token in ["ZZ", "Z"]:
|
||||
separator = ":" if token == "ZZ" else ""
|
||||
tz = dateutil_tz.tzutc() if dt.tzinfo is None else dt.tzinfo
|
||||
total_minutes = int(util.total_seconds(tz.utcoffset(dt)) / 60)
|
||||
|
||||
sign = "+" if total_minutes >= 0 else "-"
|
||||
total_minutes = abs(total_minutes)
|
||||
hour, minute = divmod(total_minutes, 60)
|
||||
|
||||
return "{}{:02d}{}{:02d}".format(sign, hour, separator, minute)
|
||||
|
||||
if token in ("a", "A"):
|
||||
return self.locale.meridian(dt.hour, token)
|
||||
|
||||
if token == "W":
|
||||
year, week, day = dt.isocalendar()
|
||||
return "{}-W{:02d}-{}".format(year, week, day)
|
||||
4267 client/ayon_core/vendor/python/python_2/arrow/locales.py vendored (file diff suppressed because it is too large)
|
|
@ -1,596 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from dateutil import tz
|
||||
|
||||
from arrow import locales
|
||||
from arrow.util import iso_to_gregorian, next_weekday, normalize_timestamp
|
||||
|
||||
try:
|
||||
from functools import lru_cache
|
||||
except ImportError: # pragma: no cover
|
||||
from backports.functools_lru_cache import lru_cache # pragma: no cover
|
||||
|
||||
|
||||
class ParserError(ValueError):
|
||||
pass
|
||||
|
||||
|
||||
# Allows for ParserErrors to be propagated from _build_datetime()
|
||||
# when day_of_year errors occur.
|
||||
# Before this, the ParserErrors were caught by the try/except in
|
||||
# _parse_multiformat() and the appropriate error message was not
|
||||
# transmitted to the user.
|
||||
class ParserMatchError(ParserError):
|
||||
pass
|
||||
|
||||
|
||||
class DateTimeParser(object):
|
||||
|
||||
_FORMAT_RE = re.compile(
|
||||
r"(YYY?Y?|MM?M?M?|Do|DD?D?D?|d?d?d?d|HH?|hh?|mm?|ss?|S+|ZZ?Z?|a|A|x|X|W)"
|
||||
)
|
||||
_ESCAPE_RE = re.compile(r"\[[^\[\]]*\]")
|
||||
|
||||
_ONE_OR_TWO_DIGIT_RE = re.compile(r"\d{1,2}")
|
||||
_ONE_OR_TWO_OR_THREE_DIGIT_RE = re.compile(r"\d{1,3}")
|
||||
_ONE_OR_MORE_DIGIT_RE = re.compile(r"\d+")
|
||||
_TWO_DIGIT_RE = re.compile(r"\d{2}")
|
||||
_THREE_DIGIT_RE = re.compile(r"\d{3}")
|
||||
_FOUR_DIGIT_RE = re.compile(r"\d{4}")
|
||||
_TZ_Z_RE = re.compile(r"([\+\-])(\d{2})(?:(\d{2}))?|Z")
|
||||
_TZ_ZZ_RE = re.compile(r"([\+\-])(\d{2})(?:\:(\d{2}))?|Z")
|
||||
_TZ_NAME_RE = re.compile(r"\w[\w+\-/]+")
|
||||
# NOTE: timestamps cannot be parsed from natural language strings (by removing the ^...$) because it will
|
||||
# break cases like "15 Jul 2000" and a format list (see issue #447)
|
||||
_TIMESTAMP_RE = re.compile(r"^\-?\d+\.?\d+$")
|
||||
_TIMESTAMP_EXPANDED_RE = re.compile(r"^\-?\d+$")
|
||||
_TIME_RE = re.compile(r"^(\d{2})(?:\:?(\d{2}))?(?:\:?(\d{2}))?(?:([\.\,])(\d+))?$")
|
||||
_WEEK_DATE_RE = re.compile(r"(?P<year>\d{4})[\-]?W(?P<week>\d{2})[\-]?(?P<day>\d)?")
|
||||
|
||||
_BASE_INPUT_RE_MAP = {
|
||||
"YYYY": _FOUR_DIGIT_RE,
|
||||
"YY": _TWO_DIGIT_RE,
|
||||
"MM": _TWO_DIGIT_RE,
|
||||
"M": _ONE_OR_TWO_DIGIT_RE,
|
||||
"DDDD": _THREE_DIGIT_RE,
|
||||
"DDD": _ONE_OR_TWO_OR_THREE_DIGIT_RE,
|
||||
"DD": _TWO_DIGIT_RE,
|
||||
"D": _ONE_OR_TWO_DIGIT_RE,
|
||||
"HH": _TWO_DIGIT_RE,
|
||||
"H": _ONE_OR_TWO_DIGIT_RE,
|
||||
"hh": _TWO_DIGIT_RE,
|
||||
"h": _ONE_OR_TWO_DIGIT_RE,
|
||||
"mm": _TWO_DIGIT_RE,
|
||||
"m": _ONE_OR_TWO_DIGIT_RE,
|
||||
"ss": _TWO_DIGIT_RE,
|
||||
"s": _ONE_OR_TWO_DIGIT_RE,
|
||||
"X": _TIMESTAMP_RE,
|
||||
"x": _TIMESTAMP_EXPANDED_RE,
|
||||
"ZZZ": _TZ_NAME_RE,
|
||||
"ZZ": _TZ_ZZ_RE,
|
||||
"Z": _TZ_Z_RE,
|
||||
"S": _ONE_OR_MORE_DIGIT_RE,
|
||||
"W": _WEEK_DATE_RE,
|
||||
}
|
||||
|
||||
SEPARATORS = ["-", "/", "."]
|
||||
|
||||
def __init__(self, locale="en_us", cache_size=0):
|
||||
|
||||
self.locale = locales.get_locale(locale)
|
||||
self._input_re_map = self._BASE_INPUT_RE_MAP.copy()
|
||||
self._input_re_map.update(
|
||||
{
|
||||
"MMMM": self._generate_choice_re(
|
||||
self.locale.month_names[1:], re.IGNORECASE
|
||||
),
|
||||
"MMM": self._generate_choice_re(
|
||||
self.locale.month_abbreviations[1:], re.IGNORECASE
|
||||
),
|
||||
"Do": re.compile(self.locale.ordinal_day_re),
|
||||
"dddd": self._generate_choice_re(
|
||||
self.locale.day_names[1:], re.IGNORECASE
|
||||
),
|
||||
"ddd": self._generate_choice_re(
|
||||
self.locale.day_abbreviations[1:], re.IGNORECASE
|
||||
),
|
||||
"d": re.compile(r"[1-7]"),
|
||||
"a": self._generate_choice_re(
|
||||
(self.locale.meridians["am"], self.locale.meridians["pm"])
|
||||
),
|
||||
# note: 'A' token accepts both 'am/pm' and 'AM/PM' formats to
|
||||
# ensure backwards compatibility of this token
|
||||
"A": self._generate_choice_re(self.locale.meridians.values()),
|
||||
}
|
||||
)
|
||||
if cache_size > 0:
|
||||
self._generate_pattern_re = lru_cache(maxsize=cache_size)(
|
||||
self._generate_pattern_re
|
||||
)
|
||||
|
||||
# TODO: since we support more than ISO 8601, we should rename this function
|
||||
# IDEA: break into multiple functions
|
||||
def parse_iso(self, datetime_string, normalize_whitespace=False):
|
||||
|
||||
if normalize_whitespace:
|
||||
datetime_string = re.sub(r"\s+", " ", datetime_string.strip())
|
||||
|
||||
has_space_divider = " " in datetime_string
|
||||
has_t_divider = "T" in datetime_string
|
||||
|
||||
num_spaces = datetime_string.count(" ")
|
||||
if has_space_divider and num_spaces != 1 or has_t_divider and num_spaces > 0:
|
||||
raise ParserError(
|
||||
"Expected an ISO 8601-like string, but was given '{}'. Try passing in a format string to resolve this.".format(
|
||||
datetime_string
|
||||
)
|
||||
)
|
||||
|
||||
has_time = has_space_divider or has_t_divider
|
||||
has_tz = False
|
||||
|
||||
# date formats (ISO 8601 and others) to test against
|
||||
# NOTE: YYYYMM is omitted to avoid confusion with YYMMDD (no longer part of ISO 8601, but is still often used)
|
||||
formats = [
|
||||
"YYYY-MM-DD",
|
||||
"YYYY-M-DD",
|
||||
"YYYY-M-D",
|
||||
"YYYY/MM/DD",
|
||||
"YYYY/M/DD",
|
||||
"YYYY/M/D",
|
||||
"YYYY.MM.DD",
|
||||
"YYYY.M.DD",
|
||||
"YYYY.M.D",
|
||||
"YYYYMMDD",
|
||||
"YYYY-DDDD",
|
||||
"YYYYDDDD",
|
||||
"YYYY-MM",
|
||||
"YYYY/MM",
|
||||
"YYYY.MM",
|
||||
"YYYY",
|
||||
"W",
|
||||
]
|
||||
|
||||
if has_time:
|
||||
|
||||
if has_space_divider:
|
||||
date_string, time_string = datetime_string.split(" ", 1)
|
||||
else:
|
||||
date_string, time_string = datetime_string.split("T", 1)
|
||||
|
||||
time_parts = re.split(r"[\+\-Z]", time_string, 1, re.IGNORECASE)
|
||||
|
||||
time_components = self._TIME_RE.match(time_parts[0])
|
||||
|
||||
if time_components is None:
|
||||
raise ParserError(
|
||||
"Invalid time component provided. Please specify a format or provide a valid time component in the basic or extended ISO 8601 time format."
|
||||
)
|
||||
|
||||
(
|
||||
hours,
|
||||
minutes,
|
||||
seconds,
|
||||
subseconds_sep,
|
||||
subseconds,
|
||||
) = time_components.groups()
|
||||
|
||||
has_tz = len(time_parts) == 2
|
||||
has_minutes = minutes is not None
|
||||
has_seconds = seconds is not None
|
||||
has_subseconds = subseconds is not None
|
||||
|
||||
is_basic_time_format = ":" not in time_parts[0]
|
||||
tz_format = "Z"
|
||||
|
||||
# use 'ZZ' token instead since tz offset is present in non-basic format
|
||||
if has_tz and ":" in time_parts[1]:
|
||||
tz_format = "ZZ"
|
||||
|
||||
time_sep = "" if is_basic_time_format else ":"
|
||||
|
||||
if has_subseconds:
|
||||
time_string = "HH{time_sep}mm{time_sep}ss{subseconds_sep}S".format(
|
||||
time_sep=time_sep, subseconds_sep=subseconds_sep
|
||||
)
|
||||
elif has_seconds:
|
||||
time_string = "HH{time_sep}mm{time_sep}ss".format(time_sep=time_sep)
|
||||
elif has_minutes:
|
||||
time_string = "HH{time_sep}mm".format(time_sep=time_sep)
|
||||
else:
|
||||
time_string = "HH"
|
||||
|
||||
if has_space_divider:
|
||||
formats = ["{} {}".format(f, time_string) for f in formats]
|
||||
else:
|
||||
formats = ["{}T{}".format(f, time_string) for f in formats]
|
||||
|
||||
if has_time and has_tz:
|
||||
# Add "Z" or "ZZ" to the format strings to indicate to
|
||||
# _parse_token() that a timezone needs to be parsed
|
||||
formats = ["{}{}".format(f, tz_format) for f in formats]
|
||||
|
||||
return self._parse_multiformat(datetime_string, formats)
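parse_iso ends by handing the assembled format list to _parse_multiformat, which simply tries each format until one matches. A stripped-down sketch of that fallback idea using strptime (illustrative only, not the vendored code path):

from datetime import datetime

def parse_multiformat(value, formats=("%Y-%m-%d %H:%M:%S", "%Y-%m-%d", "%Y%m%d")):
    # Try each candidate format in order; the first successful parse wins.
    for fmt in formats:
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError("Could not match input '{}' to any of the formats".format(value))

print(parse_multiformat("2024-07-15"))           # 2024-07-15 00:00:00
print(parse_multiformat("2024-07-15 10:30:00"))  # 2024-07-15 10:30:00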
|
||||
|
||||
def parse(self, datetime_string, fmt, normalize_whitespace=False):
|
||||
|
||||
if normalize_whitespace:
|
||||
datetime_string = re.sub(r"\s+", " ", datetime_string)
|
||||
|
||||
if isinstance(fmt, list):
|
||||
return self._parse_multiformat(datetime_string, fmt)
|
||||
|
||||
fmt_tokens, fmt_pattern_re = self._generate_pattern_re(fmt)
|
||||
|
||||
match = fmt_pattern_re.search(datetime_string)
|
||||
|
||||
if match is None:
|
||||
raise ParserMatchError(
|
||||
"Failed to match '{}' when parsing '{}'".format(fmt, datetime_string)
|
||||
)
|
||||
|
||||
parts = {}
|
||||
for token in fmt_tokens:
|
||||
if token == "Do":
|
||||
value = match.group("value")
|
||||
elif token == "W":
|
||||
value = (match.group("year"), match.group("week"), match.group("day"))
|
||||
else:
|
||||
value = match.group(token)
|
||||
self._parse_token(token, value, parts)
|
||||
|
||||
return self._build_datetime(parts)
|
||||
|
||||
def _generate_pattern_re(self, fmt):
|
||||
|
||||
# fmt is a string of tokens like 'YYYY-MM-DD'
|
||||
# we construct a new string by replacing each
|
||||
# token by its pattern:
|
||||
# 'YYYY-MM-DD' -> '(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})'
|
||||
tokens = []
|
||||
offset = 0
|
||||
|
||||
# Escape all special RegEx chars
|
||||
escaped_fmt = re.escape(fmt)
|
||||
|
||||
# Extract the bracketed expressions to be reinserted later.
|
||||
escaped_fmt = re.sub(self._ESCAPE_RE, "#", escaped_fmt)
|
||||
|
||||
# Any number of S is the same as one.
|
||||
# TODO: allow users to specify the number of digits to parse
|
||||
escaped_fmt = re.sub(r"S+", "S", escaped_fmt)
|
||||
|
||||
escaped_data = re.findall(self._ESCAPE_RE, fmt)
|
||||
|
||||
fmt_pattern = escaped_fmt
|
||||
|
||||
for m in self._FORMAT_RE.finditer(escaped_fmt):
|
||||
token = m.group(0)
|
||||
try:
|
||||
input_re = self._input_re_map[token]
|
||||
except KeyError:
|
||||
raise ParserError("Unrecognized token '{}'".format(token))
|
||||
input_pattern = "(?P<{}>{})".format(token, input_re.pattern)
|
||||
tokens.append(token)
|
||||
# a pattern doesn't have the same length as the token
|
||||
# it replaces! We keep the difference in the offset variable.
|
||||
# This works because the string is scanned left-to-right and matches
|
||||
# are returned in the order found by finditer.
|
||||
fmt_pattern = (
|
||||
fmt_pattern[: m.start() + offset]
|
||||
+ input_pattern
|
||||
+ fmt_pattern[m.end() + offset :]
|
||||
)
|
||||
offset += len(input_pattern) - (m.end() - m.start())
|
||||
|
||||
final_fmt_pattern = ""
|
||||
split_fmt = fmt_pattern.split(r"\#")
|
||||
|
||||
# Due to the way Python splits, 'split_fmt' will always be longer
|
||||
for i in range(len(split_fmt)):
|
||||
final_fmt_pattern += split_fmt[i]
|
||||
if i < len(escaped_data):
|
||||
final_fmt_pattern += escaped_data[i][1:-1]
|
||||
|
||||
# Wrap final_fmt_pattern in a custom word boundary to strictly
|
||||
# match the formatting pattern and filter out date and time formats
|
||||
# that include junk such as: blah1998-09-12 blah, blah 1998-09-12blah,
|
||||
# blah1998-09-12blah. The custom word boundary matches every character
|
||||
# that is not a whitespace character to allow for searching for a date
|
||||
# and time string in a natural language sentence. Therefore, searching
|
||||
# for a string of the form YYYY-MM-DD in "blah 1998-09-12 blah" will
|
||||
# work properly.
|
||||
# Certain punctuation before or after the target pattern such as
|
||||
# "1998-09-12," is permitted. For the full list of valid punctuation,
|
||||
# see the documentation.
|
||||
|
||||
starting_word_boundary = (
|
||||
r"(?<!\S\S)" # Don't have two consecutive non-whitespace characters. This ensures that we allow cases like .11.25.2019 but not 1.11.25.2019 (for pattern MM.DD.YYYY)
|
||||
r"(?<![^\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)<>\s])" # This is the list of punctuation that is ok before the pattern (i.e. "It can't not be these characters before the pattern")
|
||||
r"(\b|^)" # The \b is to block cases like 1201912 but allow 201912 for pattern YYYYMM. The ^ was necessary to allow a negative number through i.e. before epoch numbers
|
||||
)
|
||||
ending_word_boundary = (
|
||||
r"(?=[\,\.\;\:\?\!\"\'\`\[\]\{\}\(\)\<\>]?" # Positive lookahead stating that these punctuation marks can appear after the pattern at most 1 time
|
||||
r"(?!\S))" # Don't allow any non-whitespace character after the punctuation
|
||||
)
|
||||
bounded_fmt_pattern = r"{}{}{}".format(
|
||||
starting_word_boundary, final_fmt_pattern, ending_word_boundary
|
||||
)
|
||||
|
||||
return tokens, re.compile(bounded_fmt_pattern, flags=re.IGNORECASE)
|
||||
|
||||
def _parse_token(self, token, value, parts):
|
||||
|
||||
if token == "YYYY":
|
||||
parts["year"] = int(value)
|
||||
|
||||
elif token == "YY":
|
||||
value = int(value)
|
||||
parts["year"] = 1900 + value if value > 68 else 2000 + value
|
||||
|
||||
elif token in ["MMMM", "MMM"]:
|
||||
parts["month"] = self.locale.month_number(value.lower())
|
||||
|
||||
elif token in ["MM", "M"]:
|
||||
parts["month"] = int(value)
|
||||
|
||||
elif token in ["DDDD", "DDD"]:
|
||||
parts["day_of_year"] = int(value)
|
||||
|
||||
elif token in ["DD", "D"]:
|
||||
parts["day"] = int(value)
|
||||
|
||||
elif token == "Do":
|
||||
parts["day"] = int(value)
|
||||
|
||||
elif token == "dddd":
|
||||
# locale day names are 1-indexed
|
||||
day_of_week = [x.lower() for x in self.locale.day_names].index(
|
||||
value.lower()
|
||||
)
|
||||
parts["day_of_week"] = day_of_week - 1
|
||||
|
||||
elif token == "ddd":
|
||||
# locale day abbreviations are 1-indexed
|
||||
day_of_week = [x.lower() for x in self.locale.day_abbreviations].index(
|
||||
value.lower()
|
||||
)
|
||||
parts["day_of_week"] = day_of_week - 1
|
||||
|
||||
elif token.upper() in ["HH", "H"]:
|
||||
parts["hour"] = int(value)
|
||||
|
||||
elif token in ["mm", "m"]:
|
||||
parts["minute"] = int(value)
|
||||
|
||||
elif token in ["ss", "s"]:
|
||||
parts["second"] = int(value)
|
||||
|
||||
elif token == "S":
|
||||
# We have the *most significant* digits of an arbitrary-precision integer.
|
||||
# We want the six most significant digits as an integer, rounded.
|
||||
# IDEA: add nanosecond support somehow? Need datetime support for it first.
|
||||
value = value.ljust(7, str("0"))
|
||||
|
||||
# floating-point (IEEE-754) defaults to half-to-even rounding
|
||||
seventh_digit = int(value[6])
|
||||
if seventh_digit == 5:
|
||||
rounding = int(value[5]) % 2
|
||||
elif seventh_digit > 5:
|
||||
rounding = 1
|
||||
else:
|
||||
rounding = 0
|
||||
|
||||
parts["microsecond"] = int(value[:6]) + rounding
|
||||
|
||||
elif token == "X":
|
||||
parts["timestamp"] = float(value)
|
||||
|
||||
elif token == "x":
|
||||
parts["expanded_timestamp"] = int(value)
|
||||
|
||||
elif token in ["ZZZ", "ZZ", "Z"]:
|
||||
parts["tzinfo"] = TzinfoParser.parse(value)
|
||||
|
||||
elif token in ["a", "A"]:
|
||||
if value in (self.locale.meridians["am"], self.locale.meridians["AM"]):
|
||||
parts["am_pm"] = "am"
|
||||
elif value in (self.locale.meridians["pm"], self.locale.meridians["PM"]):
|
||||
parts["am_pm"] = "pm"
|
||||
|
||||
elif token == "W":
|
||||
parts["weekdate"] = value
|
||||
|
||||
@staticmethod
|
||||
def _build_datetime(parts):
|
||||
|
||||
weekdate = parts.get("weekdate")
|
||||
|
||||
if weekdate is not None:
|
||||
# we can use strptime (%G, %V, %u) in python 3.6 but these tokens aren't available before that
|
||||
year, week = int(weekdate[0]), int(weekdate[1])
|
||||
|
||||
if weekdate[2] is not None:
|
||||
day = int(weekdate[2])
|
||||
else:
|
||||
# day not given, default to 1
|
||||
day = 1
|
||||
|
||||
dt = iso_to_gregorian(year, week, day)
|
||||
parts["year"] = dt.year
|
||||
parts["month"] = dt.month
|
||||
parts["day"] = dt.day
|
||||
|
||||
timestamp = parts.get("timestamp")
|
||||
|
||||
if timestamp is not None:
|
||||
return datetime.fromtimestamp(timestamp, tz=tz.tzutc())
|
||||
|
||||
expanded_timestamp = parts.get("expanded_timestamp")
|
||||
|
||||
if expanded_timestamp is not None:
|
||||
return datetime.fromtimestamp(
|
||||
normalize_timestamp(expanded_timestamp),
|
||||
tz=tz.tzutc(),
|
||||
)
|
||||
|
||||
day_of_year = parts.get("day_of_year")
|
||||
|
||||
if day_of_year is not None:
|
||||
year = parts.get("year")
|
||||
month = parts.get("month")
|
||||
if year is None:
|
||||
raise ParserError(
|
||||
"Year component is required with the DDD and DDDD tokens."
|
||||
)
|
||||
|
||||
if month is not None:
|
||||
raise ParserError(
|
||||
"Month component is not allowed with the DDD and DDDD tokens."
|
||||
)
|
||||
|
||||
date_string = "{}-{}".format(year, day_of_year)
|
||||
try:
|
||||
dt = datetime.strptime(date_string, "%Y-%j")
|
||||
except ValueError:
|
||||
raise ParserError(
|
||||
"The provided day of year '{}' is invalid.".format(day_of_year)
|
||||
)
|
||||
|
||||
parts["year"] = dt.year
|
||||
parts["month"] = dt.month
|
||||
parts["day"] = dt.day
|
||||
|
||||
day_of_week = parts.get("day_of_week")
|
||||
day = parts.get("day")
|
||||
|
||||
# If day is passed, ignore day of week
|
||||
if day_of_week is not None and day is None:
|
||||
year = parts.get("year", 1970)
|
||||
month = parts.get("month", 1)
|
||||
day = 1
|
||||
|
||||
# dddd => first day of week after epoch
|
||||
# dddd YYYY => first day of week in specified year
|
||||
# dddd MM YYYY => first day of week in specified year and month
|
||||
# dddd MM => first day after epoch in specified month
|
||||
next_weekday_dt = next_weekday(datetime(year, month, day), day_of_week)
|
||||
parts["year"] = next_weekday_dt.year
|
||||
parts["month"] = next_weekday_dt.month
|
||||
parts["day"] = next_weekday_dt.day
|
||||
|
||||
am_pm = parts.get("am_pm")
|
||||
hour = parts.get("hour", 0)
|
||||
|
||||
if am_pm == "pm" and hour < 12:
|
||||
hour += 12
|
||||
elif am_pm == "am" and hour == 12:
|
||||
hour = 0
|
||||
|
||||
# Support for midnight at the end of day
|
||||
if hour == 24:
|
||||
if parts.get("minute", 0) != 0:
|
||||
raise ParserError("Midnight at the end of day must not contain minutes")
|
||||
if parts.get("second", 0) != 0:
|
||||
raise ParserError("Midnight at the end of day must not contain seconds")
|
||||
if parts.get("microsecond", 0) != 0:
|
||||
raise ParserError(
|
||||
"Midnight at the end of day must not contain microseconds"
|
||||
)
|
||||
hour = 0
|
||||
day_increment = 1
|
||||
else:
|
||||
day_increment = 0
|
||||
|
||||
# account for rounding up to 1000000
|
||||
microsecond = parts.get("microsecond", 0)
|
||||
if microsecond == 1000000:
|
||||
microsecond = 0
|
||||
second_increment = 1
|
||||
else:
|
||||
second_increment = 0
|
||||
|
||||
increment = timedelta(days=day_increment, seconds=second_increment)
|
||||
|
||||
return (
|
||||
datetime(
|
||||
year=parts.get("year", 1),
|
||||
month=parts.get("month", 1),
|
||||
day=parts.get("day", 1),
|
||||
hour=hour,
|
||||
minute=parts.get("minute", 0),
|
||||
second=parts.get("second", 0),
|
||||
microsecond=microsecond,
|
||||
tzinfo=parts.get("tzinfo"),
|
||||
)
|
||||
+ increment
|
||||
)
|
||||
|
||||
def _parse_multiformat(self, string, formats):
|
||||
|
||||
_datetime = None
|
||||
|
||||
for fmt in formats:
|
||||
try:
|
||||
_datetime = self.parse(string, fmt)
|
||||
break
|
||||
except ParserMatchError:
|
||||
pass
|
||||
|
||||
if _datetime is None:
|
||||
raise ParserError(
|
||||
"Could not match input '{}' to any of the following formats: {}".format(
|
||||
string, ", ".join(formats)
|
||||
)
|
||||
)
|
||||
|
||||
return _datetime
|
||||
|
||||
# generates a capture group of choices separated by an OR operator
|
||||
@staticmethod
|
||||
def _generate_choice_re(choices, flags=0):
|
||||
return re.compile(r"({})".format("|".join(choices)), flags=flags)
|
||||
|
||||
|
||||
class TzinfoParser(object):
|
||||
_TZINFO_RE = re.compile(r"^([\+\-])?(\d{2})(?:\:?(\d{2}))?$")
|
||||
|
||||
@classmethod
|
||||
def parse(cls, tzinfo_string):
|
||||
|
||||
tzinfo = None
|
||||
|
||||
if tzinfo_string == "local":
|
||||
tzinfo = tz.tzlocal()
|
||||
|
||||
elif tzinfo_string in ["utc", "UTC", "Z"]:
|
||||
tzinfo = tz.tzutc()
|
||||
|
||||
else:
|
||||
|
||||
iso_match = cls._TZINFO_RE.match(tzinfo_string)
|
||||
|
||||
if iso_match:
|
||||
sign, hours, minutes = iso_match.groups()
|
||||
if minutes is None:
|
||||
minutes = 0
|
||||
seconds = int(hours) * 3600 + int(minutes) * 60
|
||||
|
||||
if sign == "-":
|
||||
seconds *= -1
|
||||
|
||||
tzinfo = tz.tzoffset(None, seconds)
|
||||
|
||||
else:
|
||||
tzinfo = tz.gettz(tzinfo_string)
|
||||
|
||||
if tzinfo is None:
|
||||
raise ParserError(
|
||||
'Could not parse timezone expression "{}"'.format(tzinfo_string)
|
||||
)
|
||||
|
||||
return tzinfo
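For orientation, the core trick in _generate_pattern_re above is replacing each format token with a named capture group and matching the whole input against the resulting regex. A heavily simplified, stdlib-only sketch of that idea (illustrative, not the vendored implementation; the token table here is deliberately tiny):

import re
from datetime import datetime

TOKEN_PATTERNS = {"YYYY": r"\d{4}", "MM": r"\d{2}", "DD": r"\d{2}"}

def parse_simple(date_string, fmt="YYYY-MM-DD"):
    # Build "(?P<YYYY>\d{4})-(?P<MM>\d{2})-(?P<DD>\d{2})" from the format string.
    pattern = fmt
    for token, regex in TOKEN_PATTERNS.items():
        pattern = pattern.replace(token, "(?P<{}>{})".format(token, regex))
    match = re.match(pattern + "$", date_string)
    if match is None:
        raise ValueError("Failed to match '{}' when parsing '{}'".format(fmt, date_string))
    parts = {key: int(value) for key, value in match.groupdict().items()}
    return datetime(parts["YYYY"], parts["MM"], parts["DD"])

print(parse_simple("2024-07-15"))  # 2024-07-15 00:00:00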
|
||||
|
|
@@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import datetime
import numbers

from dateutil.rrule import WEEKLY, rrule

from arrow.constants import MAX_TIMESTAMP, MAX_TIMESTAMP_MS, MAX_TIMESTAMP_US


def next_weekday(start_date, weekday):
    """Get next weekday from the specified start date.

    :param start_date: Datetime object representing the start date.
    :param weekday: Next weekday to obtain. Can be a value between 0 (Monday) and 6 (Sunday).
    :return: Datetime object corresponding to the next weekday after start_date.

    Usage::

        # Get first Monday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 0)
        1970-01-05 00:00:00

        # Get first Thursday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 3)
        1970-01-01 00:00:00

        # Get first Sunday after epoch
        >>> next_weekday(datetime(1970, 1, 1), 6)
        1970-01-04 00:00:00
    """
    if weekday < 0 or weekday > 6:
        raise ValueError("Weekday must be between 0 (Monday) and 6 (Sunday).")
    return rrule(freq=WEEKLY, dtstart=start_date, byweekday=weekday, count=1)[0]


def total_seconds(td):
    """Get total seconds for timedelta."""
    return td.total_seconds()


def is_timestamp(value):
    """Check if value is a valid timestamp."""
    if isinstance(value, bool):
        return False
    if not (
        isinstance(value, numbers.Integral)
        or isinstance(value, float)
        or isinstance(value, str)
    ):
        return False
    try:
        float(value)
        return True
    except ValueError:
        return False


def normalize_timestamp(timestamp):
    """Normalize millisecond and microsecond timestamps into normal timestamps."""
    if timestamp > MAX_TIMESTAMP:
        if timestamp < MAX_TIMESTAMP_MS:
            timestamp /= 1e3
        elif timestamp < MAX_TIMESTAMP_US:
            timestamp /= 1e6
        else:
            raise ValueError(
                "The specified timestamp '{}' is too large.".format(timestamp)
            )
    return timestamp


# Credit to https://stackoverflow.com/a/1700069
def iso_to_gregorian(iso_year, iso_week, iso_day):
    """Converts an ISO week date tuple into a datetime object."""

    if not 1 <= iso_week <= 53:
        raise ValueError("ISO Calendar week value must be between 1-53.")

    if not 1 <= iso_day <= 7:
        raise ValueError("ISO Calendar day value must be between 1-7")

    # The first week of the year always contains 4 Jan.
    fourth_jan = datetime.date(iso_year, 1, 4)
    delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
    year_start = fourth_jan - delta
    gregorian = year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)

    return gregorian


def validate_bounds(bounds):
    if bounds != "()" and bounds != "(]" and bounds != "[)" and bounds != "[]":
        raise ValueError(
            'Invalid bounds. Please select between "()", "(]", "[)", or "[]".'
        )


# Python 2.7 / 3.0+ definitions for isstr function.

try:  # pragma: no cover
    basestring

    def isstr(s):
        return isinstance(s, basestring)  # noqa: F821


except NameError:  # pragma: no cover

    def isstr(s):
        return isinstance(s, str)


__all__ = ["next_weekday", "total_seconds", "is_timestamp", "isstr", "iso_to_gregorian"]
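On Python 3.8+ the same ISO-week conversion is available in the standard library, which is a handy cross-check for iso_to_gregorian (the vendored helper exists mainly because Python 2 lacks it). An illustrative comparison, assuming a modern interpreter:

import datetime

# Stdlib equivalent of iso_to_gregorian (Python 3.8+ only).
print(datetime.date.fromisocalendar(2024, 1, 1))  # 2024-01-01, the Monday of ISO week 1
print(datetime.date(2024, 1, 1).isocalendar())    # ISO year 2024, week 1, weekday 1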
|
||||
|
|
@@ -1,80 +0,0 @@
# SPDX-License-Identifier: MIT

from __future__ import absolute_import, division, print_function

import sys

from functools import partial

from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
    NOTHING,
    Attribute,
    Factory,
    attrib,
    attrs,
    fields,
    fields_dict,
    make_class,
    validate,
)
from ._version_info import VersionInfo


__version__ = "21.4.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"

__author__ = "Hynek Schlawack"
__email__ = "hs@ox.cx"

__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"


s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]

if sys.version_info[:2] >= (3, 6):
    from ._next_gen import define, field, frozen, mutable  # noqa: F401

    __all__.extend(("define", "field", "frozen", "mutable"))
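The aliases defined above mean that attr.s/attr.ib, attr.attrs/attr.attrib and attr.dataclass all route to the same machinery. A small illustration, assuming an installed attrs of roughly this 21.4 vintage (class names here are arbitrary):

import attr

@attr.s
class Point(object):
    x = attr.ib(default=0)
    y = attr.ib(default=0)

@attr.dataclass  # partial(attrs, auto_attribs=True), per the alias above
class TypedPoint:
    x: int = 0
    y: int = 0

print(Point(1, 2))       # Point(x=1, y=2)
print(TypedPoint(3, 4))  # TypedPoint(x=3, y=4)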
|
||||
|
|
@@ -1,484 +0,0 @@
import sys
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generic,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
# `import X as X` is required to make these public
|
||||
from . import converters as converters
|
||||
from . import exceptions as exceptions
|
||||
from . import filters as filters
|
||||
from . import setters as setters
|
||||
from . import validators as validators
|
||||
from ._version_info import VersionInfo
|
||||
|
||||
__version__: str
|
||||
__version_info__: VersionInfo
|
||||
__title__: str
|
||||
__description__: str
|
||||
__url__: str
|
||||
__uri__: str
|
||||
__author__: str
|
||||
__email__: str
|
||||
__license__: str
|
||||
__copyright__: str
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_C = TypeVar("_C", bound=type)
|
||||
|
||||
_EqOrderType = Union[bool, Callable[[Any], Any]]
|
||||
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
|
||||
_ConverterType = Callable[[Any], Any]
|
||||
_FilterType = Callable[[Attribute[_T], _T], bool]
|
||||
_ReprType = Callable[[Any], str]
|
||||
_ReprArgType = Union[bool, _ReprType]
|
||||
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
|
||||
_OnSetAttrArgType = Union[
|
||||
_OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
|
||||
]
|
||||
_FieldTransformer = Callable[
|
||||
[type, List[Attribute[Any]]], List[Attribute[Any]]
|
||||
]
|
||||
_CompareWithType = Callable[[Any, Any], bool]
|
||||
# FIXME: in reality, if multiple validators are passed they must be in a list
|
||||
# or tuple, but those are invariant and so would prevent subtypes of
|
||||
# _ValidatorType from working when passed in a list or tuple.
|
||||
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
|
||||
|
||||
# _make --
|
||||
|
||||
NOTHING: object
|
||||
|
||||
# NOTE: Factory lies about its return type to make this possible:
|
||||
# `x: List[int] # = Factory(list)`
|
||||
# Work around mypy issue #4554 in the common case by using an overload.
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Literal
|
||||
@overload
|
||||
def Factory(factory: Callable[[], _T]) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Callable[[Any], _T],
|
||||
takes_self: Literal[True],
|
||||
) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Callable[[], _T],
|
||||
takes_self: Literal[False],
|
||||
) -> _T: ...
|
||||
|
||||
else:
|
||||
@overload
|
||||
def Factory(factory: Callable[[], _T]) -> _T: ...
|
||||
@overload
|
||||
def Factory(
|
||||
factory: Union[Callable[[Any], _T], Callable[[], _T]],
|
||||
takes_self: bool = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# Static type inference support via __dataclass_transform__ implemented as per:
|
||||
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
|
||||
# This annotation must be applied to all overloads of "define" and "attrs"
|
||||
#
|
||||
# NOTE: This is a typing construct and does not exist at runtime. Extensions
|
||||
# wrapping attrs decorators should declare a separate __dataclass_transform__
|
||||
# signature in the extension module using the specification linked above to
|
||||
# provide pyright support.
|
||||
def __dataclass_transform__(
|
||||
*,
|
||||
eq_default: bool = True,
|
||||
order_default: bool = False,
|
||||
kw_only_default: bool = False,
|
||||
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
|
||||
) -> Callable[[_T], _T]: ...
|
||||
|
||||
class Attribute(Generic[_T]):
|
||||
name: str
|
||||
default: Optional[_T]
|
||||
validator: Optional[_ValidatorType[_T]]
|
||||
repr: _ReprArgType
|
||||
cmp: _EqOrderType
|
||||
eq: _EqOrderType
|
||||
order: _EqOrderType
|
||||
hash: Optional[bool]
|
||||
init: bool
|
||||
converter: Optional[_ConverterType]
|
||||
metadata: Dict[Any, Any]
|
||||
type: Optional[Type[_T]]
|
||||
kw_only: bool
|
||||
on_setattr: _OnSetAttrType
|
||||
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
|
||||
|
||||
# NOTE: We had several choices for the annotation to use for type arg:
|
||||
# 1) Type[_T]
|
||||
# - Pros: Handles simple cases correctly
|
||||
# - Cons: Might produce less informative errors in the case of conflicting
|
||||
# TypeVars e.g. `attr.ib(default='bad', type=int)`
|
||||
# 2) Callable[..., _T]
|
||||
# - Pros: Better error messages than #1 for conflicting TypeVars
|
||||
# - Cons: Terrible error messages for validator checks.
|
||||
# e.g. attr.ib(type=int, validator=validate_str)
|
||||
# -> error: Cannot infer function type argument
|
||||
# 3) type (and do all of the work in the mypy plugin)
|
||||
# - Pros: Simple here, and we could customize the plugin with our own errors.
|
||||
# - Cons: Would need to write mypy plugin code to handle all the cases.
|
||||
# We chose option #1.
|
||||
|
||||
# `attr` lies about its return type to make the following possible:
|
||||
# attr() -> Any
|
||||
# attr(8) -> int
|
||||
# attr(validator=<some callable>) -> Whatever the callable expects.
|
||||
# This makes this type of assignments possible:
|
||||
# x: int = attr(8)
|
||||
#
|
||||
# This form catches explicit None or no default but with no other arguments
|
||||
# returns Any.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: None = ...,
|
||||
converter: None = ...,
|
||||
factory: None = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> Any: ...
|
||||
|
||||
# This form catches an explicit None or no default and infers the type from the
|
||||
# other arguments.
|
||||
@overload
|
||||
def attrib(
|
||||
default: None = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: Optional[Type[_T]] = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form catches an explicit default argument.
|
||||
@overload
|
||||
def attrib(
|
||||
default: _T,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: Optional[Type[_T]] = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form covers type=non-Type: e.g. forward references (str), Any
|
||||
@overload
|
||||
def attrib(
|
||||
default: Optional[_T] = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
type: object = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> Any: ...
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: None = ...,
|
||||
validator: None = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
converter: None = ...,
|
||||
factory: None = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[bool] = ...,
|
||||
order: Optional[bool] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> Any: ...
|
||||
|
||||
# This form catches an explicit None or no default and infers the type from the
|
||||
# other arguments.
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: None = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form catches an explicit default argument.
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: _T,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> _T: ...
|
||||
|
||||
# This form covers type=non-Type: e.g. forward references (str), Any
|
||||
@overload
|
||||
def field(
|
||||
*,
|
||||
default: Optional[_T] = ...,
|
||||
validator: Optional[_ValidatorArgType[_T]] = ...,
|
||||
repr: _ReprArgType = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
metadata: Optional[Mapping[Any, Any]] = ...,
|
||||
converter: Optional[_ConverterType] = ...,
|
||||
factory: Optional[Callable[[], _T]] = ...,
|
||||
kw_only: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
) -> Any: ...
|
||||
@overload
|
||||
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
|
||||
def attrs(
|
||||
maybe_cls: _C,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
auto_detect: bool = ...,
|
||||
collect_by_mro: bool = ...,
|
||||
getstate_setstate: Optional[bool] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
field_transformer: Optional[_FieldTransformer] = ...,
|
||||
match_args: bool = ...,
|
||||
) -> _C: ...
|
||||
@overload
|
||||
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
|
||||
def attrs(
|
||||
maybe_cls: None = ...,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
auto_detect: bool = ...,
|
||||
collect_by_mro: bool = ...,
|
||||
getstate_setstate: Optional[bool] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
field_transformer: Optional[_FieldTransformer] = ...,
|
||||
match_args: bool = ...,
|
||||
) -> Callable[[_C], _C]: ...
|
||||
@overload
|
||||
@__dataclass_transform__(field_descriptors=(attrib, field))
|
||||
def define(
|
||||
maybe_cls: _C,
|
||||
*,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: Optional[bool] = ...,
|
||||
order: Optional[bool] = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: Optional[bool] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
field_transformer: Optional[_FieldTransformer] = ...,
|
||||
match_args: bool = ...,
|
||||
) -> _C: ...
|
||||
@overload
|
||||
@__dataclass_transform__(field_descriptors=(attrib, field))
|
||||
def define(
|
||||
maybe_cls: None = ...,
|
||||
*,
|
||||
these: Optional[Dict[str, Any]] = ...,
|
||||
repr: bool = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: Optional[bool] = ...,
|
||||
order: Optional[bool] = ...,
|
||||
auto_detect: bool = ...,
|
||||
getstate_setstate: Optional[bool] = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
field_transformer: Optional[_FieldTransformer] = ...,
|
||||
match_args: bool = ...,
|
||||
) -> Callable[[_C], _C]: ...
|
||||
|
||||
mutable = define
|
||||
frozen = define # they differ only in their defaults
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
class _Fields(Tuple[Attribute[Any], ...]):
|
||||
def __getattr__(self, name: str) -> Attribute[Any]: ...
|
||||
|
||||
def fields(cls: type) -> _Fields: ...
|
||||
def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
|
||||
def validate(inst: Any) -> None: ...
|
||||
def resolve_types(
|
||||
cls: _C,
|
||||
globalns: Optional[Dict[str, Any]] = ...,
|
||||
localns: Optional[Dict[str, Any]] = ...,
|
||||
attribs: Optional[List[Attribute[Any]]] = ...,
|
||||
) -> _C: ...
|
||||
|
||||
# TODO: add support for returning a proper attrs class from the mypy plugin
|
||||
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
|
||||
# [attr.ib()])` is valid
|
||||
def make_class(
|
||||
name: str,
|
||||
attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
|
||||
bases: Tuple[type, ...] = ...,
|
||||
repr_ns: Optional[str] = ...,
|
||||
repr: bool = ...,
|
||||
cmp: Optional[_EqOrderType] = ...,
|
||||
hash: Optional[bool] = ...,
|
||||
init: bool = ...,
|
||||
slots: bool = ...,
|
||||
frozen: bool = ...,
|
||||
weakref_slot: bool = ...,
|
||||
str: bool = ...,
|
||||
auto_attribs: bool = ...,
|
||||
kw_only: bool = ...,
|
||||
cache_hash: bool = ...,
|
||||
auto_exc: bool = ...,
|
||||
eq: Optional[_EqOrderType] = ...,
|
||||
order: Optional[_EqOrderType] = ...,
|
||||
collect_by_mro: bool = ...,
|
||||
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
||||
field_transformer: Optional[_FieldTransformer] = ...,
|
||||
) -> type: ...
|
||||
|
||||
# _funcs --
|
||||
|
||||
# TODO: add support for returning TypedDict from the mypy plugin
|
||||
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
|
||||
# these:
|
||||
# https://github.com/python/mypy/issues/4236
|
||||
# https://github.com/python/typing/issues/253
|
||||
# XXX: remember to fix attrs.asdict/astuple too!
|
||||
def asdict(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
dict_factory: Type[Mapping[Any, Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
value_serializer: Optional[
|
||||
Callable[[type, Attribute[Any], Any], Any]
|
||||
] = ...,
|
||||
tuple_keys: Optional[bool] = ...,
|
||||
) -> Dict[str, Any]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
def astuple(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
tuple_factory: Type[Sequence[Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
) -> Tuple[Any, ...]: ...
|
||||
def has(cls: type) -> bool: ...
|
||||
def assoc(inst: _T, **changes: Any) -> _T: ...
|
||||
def evolve(inst: _T, **changes: Any) -> _T: ...
|
||||
|
||||
# _config --
|
||||
|
||||
def set_run_validators(run: bool) -> None: ...
|
||||
def get_run_validators() -> bool: ...
|
||||
|
||||
# aliases --
|
||||
|
||||
s = attributes = attrs
|
||||
ib = attr = attrib
|
||||
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
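The stub's note above about Factory "lying" about its return type is what lets a mutable default be annotated with the value's own type. An illustrative use, assuming attrs is installed (the class and field names are made up for the example):

import attr

@attr.s
class Bag(object):
    items = attr.ib(factory=list)               # shorthand for default=attr.Factory(list)
    tags = attr.ib(default=attr.Factory(set))

a, b = Bag(), Bag()
a.items.append("x")
print(a.items, b.items)  # ['x'] [] -- each instance gets its own fresh list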
|
||||
154  client/ayon_core/vendor/python/python_2/attr/_cmp.py (vendored)

@@ -1,154 +0,0 @@
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import functools
|
||||
|
||||
from ._compat import new_class
|
||||
from ._make import _make_ne
|
||||
|
||||
|
||||
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
|
||||
|
||||
|
||||
def cmp_using(
|
||||
eq=None,
|
||||
lt=None,
|
||||
le=None,
|
||||
gt=None,
|
||||
ge=None,
|
||||
require_same_type=True,
|
||||
class_name="Comparable",
|
||||
):
|
||||
"""
|
||||
Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
|
||||
``cmp`` arguments to customize field comparison.
|
||||
|
||||
The resulting class will have a full set of ordering methods if
|
||||
at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
|
||||
|
||||
:param Optional[callable] eq: `callable` used to evaluate equality
|
||||
of two objects.
|
||||
:param Optional[callable] lt: `callable` used to evaluate whether
|
||||
one object is less than another object.
|
||||
:param Optional[callable] le: `callable` used to evaluate whether
|
||||
one object is less than or equal to another object.
|
||||
:param Optional[callable] gt: `callable` used to evaluate whether
|
||||
one object is greater than another object.
|
||||
:param Optional[callable] ge: `callable` used to evaluate whether
|
||||
one object is greater than or equal to another object.
|
||||
|
||||
:param bool require_same_type: When `True`, equality and ordering methods
|
||||
will return `NotImplemented` if objects are not of the same type.
|
||||
|
||||
:param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
|
||||
|
||||
See `comparison` for more details.
|
||||
|
||||
.. versionadded:: 21.1.0
|
||||
"""
|
||||
|
||||
body = {
|
||||
"__slots__": ["value"],
|
||||
"__init__": _make_init(),
|
||||
"_requirements": [],
|
||||
"_is_comparable_to": _is_comparable_to,
|
||||
}
|
||||
|
||||
# Add operations.
|
||||
num_order_functions = 0
|
||||
has_eq_function = False
|
||||
|
||||
if eq is not None:
|
||||
has_eq_function = True
|
||||
body["__eq__"] = _make_operator("eq", eq)
|
||||
body["__ne__"] = _make_ne()
|
||||
|
||||
if lt is not None:
|
||||
num_order_functions += 1
|
||||
body["__lt__"] = _make_operator("lt", lt)
|
||||
|
||||
if le is not None:
|
||||
num_order_functions += 1
|
||||
body["__le__"] = _make_operator("le", le)
|
||||
|
||||
if gt is not None:
|
||||
num_order_functions += 1
|
||||
body["__gt__"] = _make_operator("gt", gt)
|
||||
|
||||
if ge is not None:
|
||||
num_order_functions += 1
|
||||
body["__ge__"] = _make_operator("ge", ge)
|
||||
|
||||
type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))
|
||||
|
||||
# Add same type requirement.
|
||||
if require_same_type:
|
||||
type_._requirements.append(_check_same_type)
|
||||
|
||||
# Add total ordering if at least one operation was defined.
|
||||
if 0 < num_order_functions < 4:
|
||||
if not has_eq_function:
|
||||
# functools.total_ordering requires __eq__ to be defined,
|
||||
# so raise early error here to keep a nice stack.
|
||||
raise ValueError(
|
||||
"eq must be define is order to complete ordering from "
|
||||
"lt, le, gt, ge."
|
||||
)
|
||||
type_ = functools.total_ordering(type_)
|
||||
|
||||
return type_
|
||||
|
||||
|
||||
def _make_init():
|
||||
"""
|
||||
Create __init__ method.
|
||||
"""
|
||||
|
||||
def __init__(self, value):
|
||||
"""
|
||||
Initialize object with *value*.
|
||||
"""
|
||||
self.value = value
|
||||
|
||||
return __init__
|
||||
|
||||
|
||||
def _make_operator(name, func):
|
||||
"""
|
||||
Create operator method.
|
||||
"""
|
||||
|
||||
def method(self, other):
|
||||
if not self._is_comparable_to(other):
|
||||
return NotImplemented
|
||||
|
||||
result = func(self.value, other.value)
|
||||
if result is NotImplemented:
|
||||
return NotImplemented
|
||||
|
||||
return result
|
||||
|
||||
method.__name__ = "__%s__" % (name,)
|
||||
method.__doc__ = "Return a %s b. Computed by attrs." % (
|
||||
_operation_names[name],
|
||||
)
|
||||
|
||||
return method
|
||||
|
||||
|
||||
def _is_comparable_to(self, other):
|
||||
"""
|
||||
Check whether `other` is comparable to `self`.
|
||||
"""
|
||||
for func in self._requirements:
|
||||
if not func(self, other):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _check_same_type(self, other):
|
||||
"""
|
||||
Return True if *self* and *other* are of the same type, False otherwise.
|
||||
"""
|
||||
return other.value.__class__ is self.value.__class__
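As a usage illustration (assuming an installed attrs of at least 21.1, which ships cmp_using), the comparator class built above is what attr.ib consumes through its eq/order arguments:

import attr

@attr.s
class Name(object):
    # Field equality delegates to the wrapper class produced by cmp_using.
    value = attr.ib(eq=attr.cmp_using(eq=lambda a, b: a.lower() == b.lower()))

print(Name("Ayon") == Name("AYON"))  # True, equality is case-insensitive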
|
||||
|
|
@@ -1,13 +0,0 @@
from typing import Type
|
||||
|
||||
from . import _CompareWithType
|
||||
|
||||
def cmp_using(
|
||||
eq: Optional[_CompareWithType],
|
||||
lt: Optional[_CompareWithType],
|
||||
le: Optional[_CompareWithType],
|
||||
gt: Optional[_CompareWithType],
|
||||
ge: Optional[_CompareWithType],
|
||||
require_same_type: bool,
|
||||
class_name: str,
|
||||
) -> Type: ...
|
||||
|
|
@@ -1,261 +0,0 @@
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import platform
|
||||
import sys
|
||||
import threading
|
||||
import types
|
||||
import warnings
|
||||
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PYPY = platform.python_implementation() == "PyPy"
|
||||
PY36 = sys.version_info[:2] >= (3, 6)
|
||||
HAS_F_STRINGS = PY36
|
||||
PY310 = sys.version_info[:2] >= (3, 10)
|
||||
|
||||
|
||||
if PYPY or PY36:
|
||||
ordered_dict = dict
|
||||
else:
|
||||
from collections import OrderedDict
|
||||
|
||||
ordered_dict = OrderedDict
|
||||
|
||||
|
||||
if PY2:
|
||||
from collections import Mapping, Sequence
|
||||
|
||||
from UserDict import IterableUserDict
|
||||
|
||||
# We 'bundle' isclass instead of using inspect as importing inspect is
|
||||
# fairly expensive (order of 10-15 ms for a modern machine in 2016)
|
||||
def isclass(klass):
|
||||
return isinstance(klass, (type, types.ClassType))
|
||||
|
||||
def new_class(name, bases, kwds, exec_body):
|
||||
"""
|
||||
A minimal stub of types.new_class that we need for make_class.
|
||||
"""
|
||||
ns = {}
|
||||
exec_body(ns)
|
||||
|
||||
return type(name, bases, ns)
|
||||
|
||||
# TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
|
||||
TYPE = "type"
|
||||
|
||||
def iteritems(d):
|
||||
return d.iteritems()
|
||||
|
||||
# Python 2 is bereft of a read-only dict proxy, so we make one!
|
||||
class ReadOnlyDict(IterableUserDict):
|
||||
"""
|
||||
Best-effort read-only dict wrapper.
|
||||
"""
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise TypeError(
|
||||
"'mappingproxy' object does not support item assignment"
|
||||
)
|
||||
|
||||
def update(self, _):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'update'"
|
||||
)
|
||||
|
||||
def __delitem__(self, _):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise TypeError(
|
||||
"'mappingproxy' object does not support item deletion"
|
||||
)
|
||||
|
||||
def clear(self):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'clear'"
|
||||
)
|
||||
|
||||
def pop(self, key, default=None):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'pop'"
|
||||
)
|
||||
|
||||
def popitem(self):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'popitem'"
|
||||
)
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
# We gently pretend we're a Python 3 mappingproxy.
|
||||
raise AttributeError(
|
||||
"'mappingproxy' object has no attribute 'setdefault'"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
# Override to be identical to the Python 3 version.
|
||||
return "mappingproxy(" + repr(self.data) + ")"
|
||||
|
||||
def metadata_proxy(d):
|
||||
res = ReadOnlyDict()
|
||||
res.data.update(d) # We blocked update, so we have to do it like this.
|
||||
return res
|
||||
|
||||
def just_warn(*args, **kw): # pragma: no cover
|
||||
"""
|
||||
We only warn on Python 3 because we are not aware of any concrete
|
||||
consequences of not setting the cell on Python 2.
|
||||
"""
|
||||
|
||||
else: # Python 3 and later.
|
||||
from collections.abc import Mapping, Sequence # noqa
|
||||
|
||||
def just_warn(*args, **kw):
|
||||
"""
|
||||
We only warn on Python 3 because we are not aware of any concrete
|
||||
consequences of not setting the cell on Python 2.
|
||||
"""
|
||||
warnings.warn(
|
||||
"Running interpreter doesn't sufficiently support code object "
|
||||
"introspection. Some features like bare super() or accessing "
|
||||
"__class__ will not work with slotted classes.",
|
||||
RuntimeWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
def isclass(klass):
|
||||
return isinstance(klass, type)
|
||||
|
||||
TYPE = "class"
|
||||
|
||||
def iteritems(d):
|
||||
return d.items()
|
||||
|
||||
new_class = types.new_class
|
||||
|
||||
def metadata_proxy(d):
|
||||
return types.MappingProxyType(dict(d))
|
||||
|
||||
|
||||
def make_set_closure_cell():
|
||||
"""Return a function of two arguments (cell, value) which sets
|
||||
the value stored in the closure cell `cell` to `value`.
|
||||
"""
|
||||
# pypy makes this easy. (It also supports the logic below, but
|
||||
# why not do the easy/fast thing?)
|
||||
if PYPY:
|
||||
|
||||
def set_closure_cell(cell, value):
|
||||
cell.__setstate__((value,))
|
||||
|
||||
return set_closure_cell
|
||||
|
||||
# Otherwise gotta do it the hard way.
|
||||
|
||||
# Create a function that will set its first cellvar to `value`.
|
||||
def set_first_cellvar_to(value):
|
||||
x = value
|
||||
return
|
||||
|
||||
# This function will be eliminated as dead code, but
|
||||
# not before its reference to `x` forces `x` to be
|
||||
# represented as a closure cell rather than a local.
|
||||
def force_x_to_be_a_cell(): # pragma: no cover
|
||||
return x
|
||||
|
||||
try:
|
||||
# Extract the code object and make sure our assumptions about
|
||||
# the closure behavior are correct.
|
||||
if PY2:
|
||||
co = set_first_cellvar_to.func_code
|
||||
else:
|
||||
co = set_first_cellvar_to.__code__
|
||||
if co.co_cellvars != ("x",) or co.co_freevars != ():
|
||||
raise AssertionError # pragma: no cover
|
||||
|
||||
# Convert this code object to a code object that sets the
|
||||
# function's first _freevar_ (not cellvar) to the argument.
|
||||
if sys.version_info >= (3, 8):
|
||||
# CPython 3.8+ has an incompatible CodeType signature
|
||||
# (added a posonlyargcount argument) but also added
|
||||
# CodeType.replace() to do this without counting parameters.
|
||||
set_first_freevar_code = co.replace(
|
||||
co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
|
||||
)
|
||||
else:
|
||||
args = [co.co_argcount]
|
||||
if not PY2:
|
||||
args.append(co.co_kwonlyargcount)
|
||||
args.extend(
|
||||
[
|
||||
co.co_nlocals,
|
||||
co.co_stacksize,
|
||||
co.co_flags,
|
||||
co.co_code,
|
||||
co.co_consts,
|
||||
co.co_names,
|
||||
co.co_varnames,
|
||||
co.co_filename,
|
||||
co.co_name,
|
||||
co.co_firstlineno,
|
||||
co.co_lnotab,
|
||||
# These two arguments are reversed:
|
||||
co.co_cellvars,
|
||||
co.co_freevars,
|
||||
]
|
||||
)
|
||||
set_first_freevar_code = types.CodeType(*args)
|
||||
|
||||
def set_closure_cell(cell, value):
|
||||
# Create a function using the set_first_freevar_code,
|
||||
# whose first closure cell is `cell`. Calling it will
|
||||
# change the value of that cell.
|
||||
setter = types.FunctionType(
|
||||
set_first_freevar_code, {}, "setter", (), (cell,)
|
||||
)
|
||||
# And call it to set the cell.
|
||||
setter(value)
|
||||
|
||||
# Make sure it works on this interpreter:
|
||||
def make_func_with_cell():
|
||||
x = None
|
||||
|
||||
def func():
|
||||
return x # pragma: no cover
|
||||
|
||||
return func
|
||||
|
||||
if PY2:
|
||||
cell = make_func_with_cell().func_closure[0]
|
||||
else:
|
||||
cell = make_func_with_cell().__closure__[0]
|
||||
set_closure_cell(cell, 100)
|
||||
if cell.cell_contents != 100:
|
||||
raise AssertionError # pragma: no cover
|
||||
|
||||
except Exception:
|
||||
return just_warn
|
||||
else:
|
||||
return set_closure_cell
|
||||
|
||||
|
||||
set_closure_cell = make_set_closure_cell()
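For context, reading a closure cell has always been straightforward; it is assignment that historically had no supported API, which is why the code-object rewrite above exists. A small, read-only illustration:

def make_counter():
    count = 0

    def bump():
        return count

    return bump

bump = make_counter()
cell = bump.__closure__[0]
print(cell.cell_contents)  # 0; writing to the cell is what set_closure_cell has to arrange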
|
||||
|
||||
# Thread-local global to track attrs instances which are already being repr'd.
|
||||
# This is needed because there is no other (thread-safe) way to pass info
|
||||
# about the instances that are already being repr'd through the call stack
|
||||
# in order to ensure we don't perform infinite recursion.
|
||||
#
|
||||
# For instance, if an instance contains a dict which contains that instance,
|
||||
# we need to know that we're already repr'ing the outside instance from within
|
||||
# the dict's repr() call.
|
||||
#
|
||||
# This lives here rather than in _make.py so that the functions in _make.py
|
||||
# don't have a direct reference to the thread-local in their globals dict.
|
||||
# If they have such a reference, it breaks cloudpickle.
|
||||
repr_context = threading.local()
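On Python 3, metadata_proxy above simply wraps the dict in types.MappingProxyType, which behaves like the read-only mapping emulated for Python 2. A quick illustration:

import types

proxy = types.MappingProxyType({"key": "value"})
print(proxy["key"])  # value
try:
    proxy["key"] = "other"
except TypeError as exc:
    print(exc)  # 'mappingproxy' object does not support item assignment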
|
||||
|
|
@@ -1,33 +0,0 @@
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
__all__ = ["set_run_validators", "get_run_validators"]
|
||||
|
||||
_run_validators = True
|
||||
|
||||
|
||||
def set_run_validators(run):
|
||||
"""
|
||||
Set whether or not validators are run. By default, they are run.
|
||||
|
||||
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
|
||||
moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
|
||||
instead.
|
||||
"""
|
||||
if not isinstance(run, bool):
|
||||
raise TypeError("'run' must be bool.")
|
||||
global _run_validators
|
||||
_run_validators = run
|
||||
|
||||
|
||||
def get_run_validators():
|
||||
"""
|
||||
Return whether or not validators are run.
|
||||
|
||||
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
|
||||
moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
|
||||
instead.
|
||||
"""
|
||||
return _run_validators
|
||||
|
|
@@ -1,422 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import copy
|
||||
|
||||
from ._compat import iteritems
|
||||
from ._make import NOTHING, _obj_setattr, fields
|
||||
from .exceptions import AttrsAttributeNotFoundError
|
||||
|
||||
|
||||
def asdict(
|
||||
inst,
|
||||
recurse=True,
|
||||
filter=None,
|
||||
dict_factory=dict,
|
||||
retain_collection_types=False,
|
||||
value_serializer=None,
|
||||
):
|
||||
"""
|
||||
Return the ``attrs`` attribute values of *inst* as a dict.
|
||||
|
||||
Optionally recurse into other ``attrs``-decorated classes.
|
||||
|
||||
:param inst: Instance of an ``attrs``-decorated class.
|
||||
:param bool recurse: Recurse into classes that are also
|
||||
``attrs``-decorated.
|
||||
:param callable filter: A callable whose return code determines whether an
|
||||
attribute or element is included (``True``) or dropped (``False``). Is
|
||||
called with the `attrs.Attribute` as the first argument and the
|
||||
value as the second argument.
|
||||
:param callable dict_factory: A callable to produce dictionaries from. For
|
||||
example, to produce ordered dictionaries instead of normal Python
|
||||
dictionaries, pass in ``collections.OrderedDict``.
|
||||
:param bool retain_collection_types: Do not convert to ``list`` when
|
||||
encountering an attribute whose type is ``tuple`` or ``set``. Only
|
||||
meaningful if ``recurse`` is ``True``.
|
||||
:param Optional[callable] value_serializer: A hook that is called for every
|
||||
attribute or dict key/value. It receives the current instance, field
|
||||
and value and must return the (updated) value. The hook is run *after*
|
||||
the optional *filter* has been applied.
|
||||
|
||||
:rtype: return type of *dict_factory*
|
||||
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 16.0.0 *dict_factory*
|
||||
.. versionadded:: 16.1.0 *retain_collection_types*
|
||||
.. versionadded:: 20.3.0 *value_serializer*
|
||||
.. versionadded:: 21.3.0 If a dict has a collection for a key, it is
|
||||
serialized as a tuple.
|
||||
"""
|
||||
attrs = fields(inst.__class__)
|
||||
rv = dict_factory()
|
||||
for a in attrs:
|
||||
v = getattr(inst, a.name)
|
||||
if filter is not None and not filter(a, v):
|
||||
continue
|
||||
|
||||
if value_serializer is not None:
|
||||
v = value_serializer(inst, a, v)
|
||||
|
||||
if recurse is True:
|
||||
if has(v.__class__):
|
||||
rv[a.name] = asdict(
|
||||
v,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
dict_factory=dict_factory,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
)
|
||||
elif isinstance(v, (tuple, list, set, frozenset)):
|
||||
cf = v.__class__ if retain_collection_types is True else list
|
||||
rv[a.name] = cf(
|
||||
[
|
||||
_asdict_anything(
|
||||
i,
|
||||
is_key=False,
|
||||
filter=filter,
|
||||
dict_factory=dict_factory,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
)
|
||||
for i in v
|
||||
]
|
||||
)
|
||||
elif isinstance(v, dict):
|
||||
df = dict_factory
|
||||
rv[a.name] = df(
|
||||
(
|
||||
_asdict_anything(
|
||||
kk,
|
||||
is_key=True,
|
||||
filter=filter,
|
||||
dict_factory=df,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
),
|
||||
_asdict_anything(
|
||||
vv,
|
||||
is_key=False,
|
||||
filter=filter,
|
||||
dict_factory=df,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
),
|
||||
)
|
||||
for kk, vv in iteritems(v)
|
||||
)
|
||||
else:
|
||||
rv[a.name] = v
|
||||
else:
|
||||
rv[a.name] = v
|
||||
return rv
|
||||
|
||||
|
||||
def _asdict_anything(
|
||||
val,
|
||||
is_key,
|
||||
filter,
|
||||
dict_factory,
|
||||
retain_collection_types,
|
||||
value_serializer,
|
||||
):
|
||||
"""
|
||||
``asdict`` only works on attrs instances, this works on anything.
|
||||
"""
|
||||
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
|
||||
# Attrs class.
|
||||
rv = asdict(
|
||||
val,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
dict_factory=dict_factory,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
)
|
||||
elif isinstance(val, (tuple, list, set, frozenset)):
|
||||
if retain_collection_types is True:
|
||||
cf = val.__class__
|
||||
elif is_key:
|
||||
cf = tuple
|
||||
else:
|
||||
cf = list
|
||||
|
||||
rv = cf(
|
||||
[
|
||||
_asdict_anything(
|
||||
i,
|
||||
is_key=False,
|
||||
filter=filter,
|
||||
dict_factory=dict_factory,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
)
|
||||
for i in val
|
||||
]
|
||||
)
|
||||
elif isinstance(val, dict):
|
||||
df = dict_factory
|
||||
rv = df(
|
||||
(
|
||||
_asdict_anything(
|
||||
kk,
|
||||
is_key=True,
|
||||
filter=filter,
|
||||
dict_factory=df,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
),
|
||||
_asdict_anything(
|
||||
vv,
|
||||
is_key=False,
|
||||
filter=filter,
|
||||
dict_factory=df,
|
||||
retain_collection_types=retain_collection_types,
|
||||
value_serializer=value_serializer,
|
||||
),
|
||||
)
|
||||
for kk, vv in iteritems(val)
|
||||
)
|
||||
else:
|
||||
rv = val
|
||||
if value_serializer is not None:
|
||||
rv = value_serializer(None, None, rv)
|
||||
|
||||
return rv
|
||||
|
||||
|
||||
def astuple(
|
||||
inst,
|
||||
recurse=True,
|
||||
filter=None,
|
||||
tuple_factory=tuple,
|
||||
retain_collection_types=False,
|
||||
):
|
||||
"""
|
||||
Return the ``attrs`` attribute values of *inst* as a tuple.
|
||||
|
||||
Optionally recurse into other ``attrs``-decorated classes.
|
||||
|
||||
:param inst: Instance of an ``attrs``-decorated class.
|
||||
:param bool recurse: Recurse into classes that are also
|
||||
``attrs``-decorated.
|
||||
:param callable filter: A callable whose return code determines whether an
|
||||
attribute or element is included (``True``) or dropped (``False``). Is
|
||||
called with the `attrs.Attribute` as the first argument and the
|
||||
value as the second argument.
|
||||
:param callable tuple_factory: A callable to produce tuples from. For
|
||||
example, to produce lists instead of tuples.
|
||||
:param bool retain_collection_types: Do not convert to ``list``
|
||||
or ``dict`` when encountering an attribute which type is
|
||||
``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
|
||||
``True``.
|
||||
|
||||
:rtype: return type of *tuple_factory*
|
||||
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 16.2.0
|
||||
"""
|
||||
attrs = fields(inst.__class__)
|
||||
rv = []
|
||||
retain = retain_collection_types # Very long. :/
|
||||
for a in attrs:
|
||||
v = getattr(inst, a.name)
|
||||
if filter is not None and not filter(a, v):
|
||||
continue
|
||||
if recurse is True:
|
||||
if has(v.__class__):
|
||||
rv.append(
|
||||
astuple(
|
||||
v,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
)
|
||||
elif isinstance(v, (tuple, list, set, frozenset)):
|
||||
cf = v.__class__ if retain is True else list
|
||||
rv.append(
|
||||
cf(
|
||||
[
|
||||
astuple(
|
||||
j,
|
||||
recurse=True,
|
||||
filter=filter,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(j.__class__)
|
||||
else j
|
||||
for j in v
|
||||
]
|
||||
)
|
||||
)
|
||||
elif isinstance(v, dict):
|
||||
df = v.__class__ if retain is True else dict
|
||||
rv.append(
|
||||
df(
|
||||
(
|
||||
astuple(
|
||||
kk,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(kk.__class__)
|
||||
else kk,
|
||||
astuple(
|
||||
vv,
|
||||
tuple_factory=tuple_factory,
|
||||
retain_collection_types=retain,
|
||||
)
|
||||
if has(vv.__class__)
|
||||
else vv,
|
||||
)
|
||||
for kk, vv in iteritems(v)
|
||||
)
|
||||
)
|
||||
else:
|
||||
rv.append(v)
|
||||
else:
|
||||
rv.append(v)
|
||||
|
||||
return rv if tuple_factory is list else tuple_factory(rv)
|
||||
|
||||
|
||||
def has(cls):
|
||||
"""
|
||||
Check whether *cls* is a class with ``attrs`` attributes.
|
||||
|
||||
:param type cls: Class to introspect.
|
||||
:raise TypeError: If *cls* is not a class.
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return getattr(cls, "__attrs_attrs__", None) is not None
|
||||
|
||||
|
||||
def assoc(inst, **changes):
|
||||
"""
|
||||
Copy *inst* and apply *changes*.
|
||||
|
||||
:param inst: Instance of a class with ``attrs`` attributes.
|
||||
:param changes: Keyword changes in the new copy.
|
||||
|
||||
:return: A copy of inst with *changes* incorporated.
|
||||
|
||||
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
|
||||
be found on *cls*.
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. deprecated:: 17.1.0
|
||||
Use `attrs.evolve` instead if you can.
|
||||
This function will not be removed du to the slightly different approach
|
||||
compared to `attrs.evolve`.
|
||||
"""
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"assoc is deprecated and will be removed after 2018/01.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
new = copy.copy(inst)
|
||||
attrs = fields(inst.__class__)
|
||||
for k, v in iteritems(changes):
|
||||
a = getattr(attrs, k, NOTHING)
|
||||
if a is NOTHING:
|
||||
raise AttrsAttributeNotFoundError(
|
||||
"{k} is not an attrs attribute on {cl}.".format(
|
||||
k=k, cl=new.__class__
|
||||
)
|
||||
)
|
||||
_obj_setattr(new, k, v)
|
||||
return new
|
||||
|
||||
|
||||
def evolve(inst, **changes):
|
||||
"""
|
||||
Create a new instance, based on *inst* with *changes* applied.
|
||||
|
||||
:param inst: Instance of a class with ``attrs`` attributes.
|
||||
:param changes: Keyword changes in the new copy.
|
||||
|
||||
:return: A copy of inst with *changes* incorporated.
|
||||
|
||||
:raise TypeError: If *attr_name* couldn't be found in the class
|
||||
``__init__``.
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
cls = inst.__class__
|
||||
attrs = fields(cls)
|
||||
for a in attrs:
|
||||
if not a.init:
|
||||
continue
|
||||
attr_name = a.name # To deal with private attributes.
|
||||
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
|
||||
if init_name not in changes:
|
||||
changes[init_name] = getattr(inst, attr_name)
|
||||
|
||||
return cls(**changes)
|
||||
|
||||
|
||||
def resolve_types(cls, globalns=None, localns=None, attribs=None):
|
||||
"""
|
||||
Resolve any strings and forward annotations in type annotations.
|
||||
|
||||
This is only required if you need concrete types in `Attribute`'s *type*
|
||||
field. In other words, you don't need to resolve your types if you only
|
||||
use them for static type checking.
|
||||
|
||||
With no arguments, names will be looked up in the module in which the class
|
||||
was created. If this is not what you want, e.g. if the name only exists
|
||||
inside a method, you may pass *globalns* or *localns* to specify other
|
||||
dictionaries in which to look up these names. See the docs of
|
||||
`typing.get_type_hints` for more details.
|
||||
|
||||
:param type cls: Class to resolve.
|
||||
:param Optional[dict] globalns: Dictionary containing global variables.
|
||||
:param Optional[dict] localns: Dictionary containing local variables.
|
||||
:param Optional[list] attribs: List of attribs for the given class.
|
||||
This is necessary when calling from inside a ``field_transformer``
|
||||
since *cls* is not an ``attrs`` class yet.
|
||||
|
||||
:raise TypeError: If *cls* is not a class.
|
||||
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
|
||||
class and you didn't pass any attribs.
|
||||
:raise NameError: If types cannot be resolved because of missing variables.
|
||||
|
||||
:returns: *cls* so you can use this function also as a class decorator.
|
||||
Please note that you have to apply it **after** `attrs.define`. That
|
||||
means the decorator has to come in the line **before** `attrs.define`.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
.. versionadded:: 21.1.0 *attribs*
|
||||
|
||||
"""
|
||||
# Since calling get_type_hints is expensive we cache whether we've
|
||||
# done it already.
|
||||
if getattr(cls, "__attrs_types_resolved__", None) != cls:
|
||||
import typing
|
||||
|
||||
hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
|
||||
for field in fields(cls) if attribs is None else attribs:
|
||||
if field.name in hints:
|
||||
# Since fields have been frozen we must work around it.
|
||||
_obj_setattr(field, "type", hints[field.name])
|
||||
# We store the class we resolved so that subclasses know they haven't
|
||||
# been resolved.
|
||||
cls.__attrs_types_resolved__ = cls
|
||||
|
||||
# Return the class so you can use it as a decorator too.
|
||||
return cls
|
||||
3173
client/ayon_core/vendor/python/python_2/attr/_make.py
vendored
File diff suppressed because it is too large
|
|
@@ -1,216 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
|
||||
`attr.ib` with different default values.
|
||||
"""
|
||||
|
||||
|
||||
from functools import partial
|
||||
|
||||
from . import setters
|
||||
from ._funcs import asdict as _asdict
|
||||
from ._funcs import astuple as _astuple
|
||||
from ._make import (
|
||||
NOTHING,
|
||||
_frozen_setattrs,
|
||||
_ng_default_on_setattr,
|
||||
attrib,
|
||||
attrs,
|
||||
)
|
||||
from .exceptions import UnannotatedAttributeError
|
||||
|
||||
|
||||
def define(
|
||||
maybe_cls=None,
|
||||
*,
|
||||
these=None,
|
||||
repr=None,
|
||||
hash=None,
|
||||
init=None,
|
||||
slots=True,
|
||||
frozen=False,
|
||||
weakref_slot=True,
|
||||
str=False,
|
||||
auto_attribs=None,
|
||||
kw_only=False,
|
||||
cache_hash=False,
|
||||
auto_exc=True,
|
||||
eq=None,
|
||||
order=False,
|
||||
auto_detect=True,
|
||||
getstate_setstate=None,
|
||||
on_setattr=None,
|
||||
field_transformer=None,
|
||||
match_args=True,
|
||||
):
|
||||
r"""
|
||||
Define an ``attrs`` class.
|
||||
|
||||
Differences to the classic `attr.s` that it uses underneath:
|
||||
|
||||
- Automatically detect whether or not *auto_attribs* should be `True`
|
||||
(c.f. *auto_attribs* parameter).
|
||||
- If *frozen* is `False`, run converters and validators when setting an
|
||||
attribute by default.
|
||||
- *slots=True* (see :term:`slotted classes` for potentially surprising
|
||||
behaviors)
|
||||
- *auto_exc=True*
|
||||
- *auto_detect=True*
|
||||
- *order=False*
|
||||
- *match_args=True*
|
||||
- Some options that were only relevant on Python 2 or were kept around for
|
||||
backwards-compatibility have been removed.
|
||||
|
||||
Please note that these are all defaults and you can change them as you
|
||||
wish.
|
||||
|
||||
:param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
|
||||
exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
|
||||
|
||||
1. If any attributes are annotated and no unannotated `attrs.fields`\ s
|
||||
are found, it assumes *auto_attribs=True*.
|
||||
2. Otherwise it assumes *auto_attribs=False* and tries to collect
|
||||
`attrs.fields`\ s.
|
||||
|
||||
For now, please refer to `attr.s` for the rest of the parameters.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
|
||||
"""
|
||||
|
||||
def do_it(cls, auto_attribs):
|
||||
return attrs(
|
||||
maybe_cls=cls,
|
||||
these=these,
|
||||
repr=repr,
|
||||
hash=hash,
|
||||
init=init,
|
||||
slots=slots,
|
||||
frozen=frozen,
|
||||
weakref_slot=weakref_slot,
|
||||
str=str,
|
||||
auto_attribs=auto_attribs,
|
||||
kw_only=kw_only,
|
||||
cache_hash=cache_hash,
|
||||
auto_exc=auto_exc,
|
||||
eq=eq,
|
||||
order=order,
|
||||
auto_detect=auto_detect,
|
||||
collect_by_mro=True,
|
||||
getstate_setstate=getstate_setstate,
|
||||
on_setattr=on_setattr,
|
||||
field_transformer=field_transformer,
|
||||
match_args=match_args,
|
||||
)
|
||||
|
||||
def wrap(cls):
|
||||
"""
|
||||
Making this a wrapper ensures this code runs during class creation.
|
||||
|
||||
We also ensure that frozen-ness of classes is inherited.
|
||||
"""
|
||||
nonlocal frozen, on_setattr
|
||||
|
||||
had_on_setattr = on_setattr not in (None, setters.NO_OP)
|
||||
|
||||
# By default, mutable classes convert & validate on setattr.
|
||||
if frozen is False and on_setattr is None:
|
||||
on_setattr = _ng_default_on_setattr
|
||||
|
||||
# However, if we subclass a frozen class, we inherit the immutability
|
||||
# and disable on_setattr.
|
||||
for base_cls in cls.__bases__:
|
||||
if base_cls.__setattr__ is _frozen_setattrs:
|
||||
if had_on_setattr:
|
||||
raise ValueError(
|
||||
"Frozen classes can't use on_setattr "
|
||||
"(frozen-ness was inherited)."
|
||||
)
|
||||
|
||||
on_setattr = setters.NO_OP
|
||||
break
|
||||
|
||||
if auto_attribs is not None:
|
||||
return do_it(cls, auto_attribs)
|
||||
|
||||
try:
|
||||
return do_it(cls, True)
|
||||
except UnannotatedAttributeError:
|
||||
return do_it(cls, False)
|
||||
|
||||
# maybe_cls's type depends on the usage of the decorator. It's a class
|
||||
# if it's used as `@attrs` but ``None`` if used as `@attrs()`.
|
||||
if maybe_cls is None:
|
||||
return wrap
|
||||
else:
|
||||
return wrap(maybe_cls)
|
||||
|
||||
|
||||
mutable = define
|
||||
frozen = partial(define, frozen=True, on_setattr=None)
|
||||
|
||||
|
||||
def field(
|
||||
*,
|
||||
default=NOTHING,
|
||||
validator=None,
|
||||
repr=True,
|
||||
hash=None,
|
||||
init=True,
|
||||
metadata=None,
|
||||
converter=None,
|
||||
factory=None,
|
||||
kw_only=False,
|
||||
eq=None,
|
||||
order=None,
|
||||
on_setattr=None,
|
||||
):
|
||||
"""
|
||||
Identical to `attr.ib`, except keyword-only and with some arguments
|
||||
removed.
|
||||
|
||||
.. versionadded:: 20.1.0
|
||||
"""
|
||||
return attrib(
|
||||
default=default,
|
||||
validator=validator,
|
||||
repr=repr,
|
||||
hash=hash,
|
||||
init=init,
|
||||
metadata=metadata,
|
||||
converter=converter,
|
||||
factory=factory,
|
||||
kw_only=kw_only,
|
||||
eq=eq,
|
||||
order=order,
|
||||
on_setattr=on_setattr,
|
||||
)
|
||||
|
||||
|
||||
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
|
||||
"""
|
||||
Same as `attr.asdict`, except that collections types are always retained
|
||||
and dict is always used as *dict_factory*.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _asdict(
|
||||
inst=inst,
|
||||
recurse=recurse,
|
||||
filter=filter,
|
||||
value_serializer=value_serializer,
|
||||
retain_collection_types=True,
|
||||
)
|
||||
|
||||
|
||||
def astuple(inst, *, recurse=True, filter=None):
|
||||
"""
|
||||
Same as `attr.astuple`, except that collections types are always retained
|
||||
and `tuple` is always used as the *tuple_factory*.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _astuple(
|
||||
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
|
||||
)
|
||||
|
|
@@ -1,87 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from functools import total_ordering
|
||||
|
||||
from ._funcs import astuple
|
||||
from ._make import attrib, attrs
|
||||
|
||||
|
||||
@total_ordering
|
||||
@attrs(eq=False, order=False, slots=True, frozen=True)
|
||||
class VersionInfo(object):
|
||||
"""
|
||||
A version object that can be compared to tuple of length 1--4:
|
||||
|
||||
>>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
|
||||
True
|
||||
>>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
|
||||
True
|
||||
>>> vi = attr.VersionInfo(19, 2, 0, "final")
|
||||
>>> vi < (19, 1, 1)
|
||||
False
|
||||
>>> vi < (19,)
|
||||
False
|
||||
>>> vi == (19, 2,)
|
||||
True
|
||||
>>> vi == (19, 2, 1)
|
||||
False
|
||||
|
||||
.. versionadded:: 19.2
|
||||
"""
|
||||
|
||||
year = attrib(type=int)
|
||||
minor = attrib(type=int)
|
||||
micro = attrib(type=int)
|
||||
releaselevel = attrib(type=str)
|
||||
|
||||
@classmethod
|
||||
def _from_version_string(cls, s):
|
||||
"""
|
||||
Parse *s* and return a _VersionInfo.
|
||||
"""
|
||||
v = s.split(".")
|
||||
if len(v) == 3:
|
||||
v.append("final")
|
||||
|
||||
return cls(
|
||||
year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
|
||||
)
|
||||
|
||||
def _ensure_tuple(self, other):
|
||||
"""
|
||||
Ensure *other* is a tuple of a valid length.
|
||||
|
||||
Returns a possibly transformed *other* and ourselves as a tuple of
|
||||
the same length as *other*.
|
||||
"""
|
||||
|
||||
if self.__class__ is other.__class__:
|
||||
other = astuple(other)
|
||||
|
||||
if not isinstance(other, tuple):
|
||||
raise NotImplementedError
|
||||
|
||||
if not (1 <= len(other) <= 4):
|
||||
raise NotImplementedError
|
||||
|
||||
return astuple(self)[: len(other)], other
|
||||
|
||||
def __eq__(self, other):
|
||||
try:
|
||||
us, them = self._ensure_tuple(other)
|
||||
except NotImplementedError:
|
||||
return NotImplemented
|
||||
|
||||
return us == them
|
||||
|
||||
def __lt__(self, other):
|
||||
try:
|
||||
us, them = self._ensure_tuple(other)
|
||||
except NotImplementedError:
|
||||
return NotImplemented
|
||||
|
||||
# Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
|
||||
# have to do anything special with releaselevel for now.
|
||||
return us < them
|
||||
|
|
@@ -1,9 +0,0 @@
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
@@ -1,155 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly useful converters.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from ._compat import PY2
|
||||
from ._make import NOTHING, Factory, pipe
|
||||
|
||||
|
||||
if not PY2:
|
||||
import inspect
|
||||
import typing
|
||||
|
||||
|
||||
__all__ = [
|
||||
"default_if_none",
|
||||
"optional",
|
||||
"pipe",
|
||||
"to_bool",
|
||||
]
|
||||
|
||||
|
||||
def optional(converter):
|
||||
"""
|
||||
A converter that allows an attribute to be optional. An optional attribute
|
||||
is one which can be set to ``None``.
|
||||
|
||||
Type annotations will be inferred from the wrapped converter's, if it
|
||||
has any.
|
||||
|
||||
:param callable converter: the converter that is used for non-``None``
|
||||
values.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
|
||||
def optional_converter(val):
|
||||
if val is None:
|
||||
return None
|
||||
return converter(val)
|
||||
|
||||
if not PY2:
|
||||
sig = None
|
||||
try:
|
||||
sig = inspect.signature(converter)
|
||||
except (ValueError, TypeError): # inspect failed
|
||||
pass
|
||||
if sig:
|
||||
params = list(sig.parameters.values())
|
||||
if params and params[0].annotation is not inspect.Parameter.empty:
|
||||
optional_converter.__annotations__["val"] = typing.Optional[
|
||||
params[0].annotation
|
||||
]
|
||||
if sig.return_annotation is not inspect.Signature.empty:
|
||||
optional_converter.__annotations__["return"] = typing.Optional[
|
||||
sig.return_annotation
|
||||
]
|
||||
|
||||
return optional_converter
|
||||
|
||||
|
||||
def default_if_none(default=NOTHING, factory=None):
|
||||
"""
|
||||
A converter that allows to replace ``None`` values by *default* or the
|
||||
result of *factory*.
|
||||
|
||||
:param default: Value to be used if ``None`` is passed. Passing an instance
|
||||
of `attrs.Factory` is supported, however the ``takes_self`` option
|
||||
is *not*.
|
||||
:param callable factory: A callable that takes no parameters whose result
|
||||
is used if ``None`` is passed.
|
||||
|
||||
:raises TypeError: If **neither** *default* or *factory* is passed.
|
||||
:raises TypeError: If **both** *default* and *factory* are passed.
|
||||
:raises ValueError: If an instance of `attrs.Factory` is passed with
|
||||
``takes_self=True``.
|
||||
|
||||
.. versionadded:: 18.2.0
|
||||
"""
|
||||
if default is NOTHING and factory is None:
|
||||
raise TypeError("Must pass either `default` or `factory`.")
|
||||
|
||||
if default is not NOTHING and factory is not None:
|
||||
raise TypeError(
|
||||
"Must pass either `default` or `factory` but not both."
|
||||
)
|
||||
|
||||
if factory is not None:
|
||||
default = Factory(factory)
|
||||
|
||||
if isinstance(default, Factory):
|
||||
if default.takes_self:
|
||||
raise ValueError(
|
||||
"`takes_self` is not supported by default_if_none."
|
||||
)
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default.factory()
|
||||
|
||||
else:
|
||||
|
||||
def default_if_none_converter(val):
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
return default
|
||||
|
||||
return default_if_none_converter
|
||||
|
||||
|
||||
def to_bool(val):
|
||||
"""
|
||||
Convert "boolean" strings (e.g., from env. vars.) to real booleans.
|
||||
|
||||
Values mapping to :code:`True`:
|
||||
|
||||
- :code:`True`
|
||||
- :code:`"true"` / :code:`"t"`
|
||||
- :code:`"yes"` / :code:`"y"`
|
||||
- :code:`"on"`
|
||||
- :code:`"1"`
|
||||
- :code:`1`
|
||||
|
||||
Values mapping to :code:`False`:
|
||||
|
||||
- :code:`False`
|
||||
- :code:`"false"` / :code:`"f"`
|
||||
- :code:`"no"` / :code:`"n"`
|
||||
- :code:`"off"`
|
||||
- :code:`"0"`
|
||||
- :code:`0`
|
||||
|
||||
:raises ValueError: for any other value.
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
if isinstance(val, str):
|
||||
val = val.lower()
|
||||
truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
|
||||
falsy = {False, "false", "f", "no", "n", "off", "0", 0}
|
||||
try:
|
||||
if val in truthy:
|
||||
return True
|
||||
if val in falsy:
|
||||
return False
|
||||
except TypeError:
|
||||
# Raised when "val" is not hashable (e.g., lists)
|
||||
pass
|
||||
raise ValueError("Cannot convert value to bool: {}".format(val))
|
||||
|
|
@@ -1,13 +0,0 @@
from typing import Callable, Optional, TypeVar, overload

from . import _ConverterType

_T = TypeVar("_T")

def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...
@@ -1,94 +0,0 @@
# SPDX-License-Identifier: MIT

from __future__ import absolute_import, division, print_function


class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute have been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    msg = "can't set attribute"
    args = [msg]


class FrozenInstanceError(FrozenError):
    """
    A frozen instance has been attempted to be modified.

    .. versionadded:: 16.1.0
    """


class FrozenAttributeError(FrozenError):
    """
    A frozen attribute has been attempted to be modified.

    .. versionadded:: 20.1.0
    """


class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and is attempted to be reset
    using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    It was attempted to use an ``attrs`` feature that requires a newer Python
    version.

    .. versionadded:: 18.2.0
    """


class NotCallableError(TypeError):
    """
    A ``attr.ib()`` requiring a callable has been set with a value
    that is not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        super(TypeError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        return str(self.msg)
@@ -1,17 +0,0 @@
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
@@ -1,54 +0,0 @@
# SPDX-License-Identifier: MIT

"""
Commonly useful filters for `attr.asdict`.
"""

from __future__ import absolute_import, division, print_function

from ._compat import isclass
from ._make import Attribute


def _split_what(what):
    """
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    return (
        frozenset(cls for cls in what if isclass(cls)),
        frozenset(cls for cls in what if isinstance(cls, Attribute)),
    )


def include(*what):
    """
    Include *what*.

    :param what: What to include.
    :type what: `list` of `type` or `attrs.Attribute`\\ s

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def include_(attribute, value):
        return value.__class__ in cls or attribute in attrs

    return include_


def exclude(*what):
    """
    Exclude *what*.

    :param what: What to exclude.
    :type what: `list` of classes or `attrs.Attribute`\\ s.

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def exclude_(attribute, value):
        return value.__class__ not in cls and attribute not in attrs

    return exclude_
@@ -1,6 +0,0 @@
from typing import Any, Union

from . import Attribute, _FilterType

def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
@@ -1,79 +0,0 @@
# SPDX-License-Identifier: MIT

"""
Commonly used hooks for on_setattr.
"""

from __future__ import absolute_import, division, print_function

from . import _config
from .exceptions import FrozenAttributeError


def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        rv = new_value

        for setter in setters:
            rv = setter(instance, attrib, rv)

        return rv

    return wrapped_pipe


def frozen(_, __, ___):
    """
    Prevent an attribute to be modified.

    .. versionadded:: 20.1.0
    """
    raise FrozenAttributeError()


def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    if _config._run_validators is False:
        return new_value

    v = attrib.validator
    if not v:
        return new_value

    v(instance, attrib, new_value)

    return new_value


def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    c = attrib.converter
    if c:
        return c(new_value)

    return new_value


NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""
@@ -1,19 +0,0 @@
from typing import Any, NewType, NoReturn, TypeVar, cast

from . import Attribute, _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
@@ -1,561 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Commonly useful validators.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import operator
|
||||
import re
|
||||
|
||||
from contextlib import contextmanager
|
||||
|
||||
from ._config import get_run_validators, set_run_validators
|
||||
from ._make import _AndValidator, and_, attrib, attrs
|
||||
from .exceptions import NotCallableError
|
||||
|
||||
|
||||
try:
|
||||
Pattern = re.Pattern
|
||||
except AttributeError: # Python <3.7 lacks a Pattern type.
|
||||
Pattern = type(re.compile(""))
|
||||
|
||||
|
||||
__all__ = [
|
||||
"and_",
|
||||
"deep_iterable",
|
||||
"deep_mapping",
|
||||
"disabled",
|
||||
"ge",
|
||||
"get_disabled",
|
||||
"gt",
|
||||
"in_",
|
||||
"instance_of",
|
||||
"is_callable",
|
||||
"le",
|
||||
"lt",
|
||||
"matches_re",
|
||||
"max_len",
|
||||
"optional",
|
||||
"provides",
|
||||
"set_disabled",
|
||||
]
|
||||
|
||||
|
||||
def set_disabled(disabled):
|
||||
"""
|
||||
Globally disable or enable running validators.
|
||||
|
||||
By default, they are run.
|
||||
|
||||
:param disabled: If ``True``, disable running all validators.
|
||||
:type disabled: bool
|
||||
|
||||
.. warning::
|
||||
|
||||
This function is not thread-safe!
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
set_run_validators(not disabled)
|
||||
|
||||
|
||||
def get_disabled():
|
||||
"""
|
||||
Return a bool indicating whether validators are currently disabled or not.
|
||||
|
||||
:return: ``True`` if validators are currently disabled.
|
||||
:rtype: bool
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return not get_run_validators()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def disabled():
|
||||
"""
|
||||
Context manager that disables running validators within its context.
|
||||
|
||||
.. warning::
|
||||
|
||||
This context manager is not thread-safe!
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
set_run_validators(False)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
set_run_validators(True)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _InstanceOfValidator(object):
|
||||
type = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not isinstance(value, self.type):
|
||||
raise TypeError(
|
||||
"'{name}' must be {type!r} (got {value!r} that is a "
|
||||
"{actual!r}).".format(
|
||||
name=attr.name,
|
||||
type=self.type,
|
||||
actual=value.__class__,
|
||||
value=value,
|
||||
),
|
||||
attr,
|
||||
self.type,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<instance_of validator for type {type!r}>".format(
|
||||
type=self.type
|
||||
)
|
||||
|
||||
|
||||
def instance_of(type):
|
||||
"""
|
||||
A validator that raises a `TypeError` if the initializer is called
|
||||
with a wrong type for this particular attribute (checks are performed using
|
||||
`isinstance` therefore it's also valid to pass a tuple of types).
|
||||
|
||||
:param type: The type to check for.
|
||||
:type type: type or tuple of types
|
||||
|
||||
:raises TypeError: With a human readable error message, the attribute
|
||||
(of type `attrs.Attribute`), the expected type, and the value it
|
||||
got.
|
||||
"""
|
||||
return _InstanceOfValidator(type)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MatchesReValidator(object):
|
||||
pattern = attrib()
|
||||
match_func = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.match_func(value):
|
||||
raise ValueError(
|
||||
"'{name}' must match regex {pattern!r}"
|
||||
" ({value!r} doesn't)".format(
|
||||
name=attr.name, pattern=self.pattern.pattern, value=value
|
||||
),
|
||||
attr,
|
||||
self.pattern,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<matches_re validator for pattern {pattern!r}>".format(
|
||||
pattern=self.pattern
|
||||
)
|
||||
|
||||
|
||||
def matches_re(regex, flags=0, func=None):
|
||||
r"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string that doesn't match *regex*.
|
||||
|
||||
:param regex: a regex string or precompiled pattern to match against
|
||||
:param int flags: flags that will be passed to the underlying re function
|
||||
(default 0)
|
||||
:param callable func: which underlying `re` function to call (options
|
||||
are `re.fullmatch`, `re.search`, `re.match`, default
|
||||
is ``None`` which means either `re.fullmatch` or an emulation of
|
||||
it on Python 2). For performance reasons, they won't be used directly
|
||||
but on a pre-`re.compile`\ ed pattern.
|
||||
|
||||
.. versionadded:: 19.2.0
|
||||
.. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
|
||||
"""
|
||||
fullmatch = getattr(re, "fullmatch", None)
|
||||
valid_funcs = (fullmatch, None, re.search, re.match)
|
||||
if func not in valid_funcs:
|
||||
raise ValueError(
|
||||
"'func' must be one of {}.".format(
|
||||
", ".join(
|
||||
sorted(
|
||||
e and e.__name__ or "None" for e in set(valid_funcs)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if isinstance(regex, Pattern):
|
||||
if flags:
|
||||
raise TypeError(
|
||||
"'flags' can only be used with a string pattern; "
|
||||
"pass flags to re.compile() instead"
|
||||
)
|
||||
pattern = regex
|
||||
else:
|
||||
pattern = re.compile(regex, flags)
|
||||
|
||||
if func is re.match:
|
||||
match_func = pattern.match
|
||||
elif func is re.search:
|
||||
match_func = pattern.search
|
||||
elif fullmatch:
|
||||
match_func = pattern.fullmatch
|
||||
else: # Python 2 fullmatch emulation (https://bugs.python.org/issue16203)
|
||||
pattern = re.compile(
|
||||
r"(?:{})\Z".format(pattern.pattern), pattern.flags
|
||||
)
|
||||
match_func = pattern.match
|
||||
|
||||
return _MatchesReValidator(pattern, match_func)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _ProvidesValidator(object):
|
||||
interface = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.interface.providedBy(value):
|
||||
raise TypeError(
|
||||
"'{name}' must provide {interface!r} which {value!r} "
|
||||
"doesn't.".format(
|
||||
name=attr.name, interface=self.interface, value=value
|
||||
),
|
||||
attr,
|
||||
self.interface,
|
||||
value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<provides validator for interface {interface!r}>".format(
|
||||
interface=self.interface
|
||||
)
|
||||
|
||||
|
||||
def provides(interface):
|
||||
"""
|
||||
A validator that raises a `TypeError` if the initializer is called
|
||||
with an object that does not provide the requested *interface* (checks are
|
||||
performed using ``interface.providedBy(value)`` (see `zope.interface
|
||||
<https://zopeinterface.readthedocs.io/en/latest/>`_).
|
||||
|
||||
:param interface: The interface to check for.
|
||||
:type interface: ``zope.interface.Interface``
|
||||
|
||||
:raises TypeError: With a human readable error message, the attribute
|
||||
(of type `attrs.Attribute`), the expected interface, and the
|
||||
value it got.
|
||||
"""
|
||||
return _ProvidesValidator(interface)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _OptionalValidator(object):
|
||||
validator = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
if value is None:
|
||||
return
|
||||
|
||||
self.validator(inst, attr, value)
|
||||
|
||||
def __repr__(self):
|
||||
return "<optional validator for {what} or None>".format(
|
||||
what=repr(self.validator)
|
||||
)
|
||||
|
||||
|
||||
def optional(validator):
|
||||
"""
|
||||
A validator that makes an attribute optional. An optional attribute is one
|
||||
which can be set to ``None`` in addition to satisfying the requirements of
|
||||
the sub-validator.
|
||||
|
||||
:param validator: A validator (or a list of validators) that is used for
|
||||
non-``None`` values.
|
||||
:type validator: callable or `list` of callables.
|
||||
|
||||
.. versionadded:: 15.1.0
|
||||
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
|
||||
"""
|
||||
if isinstance(validator, list):
|
||||
return _OptionalValidator(_AndValidator(validator))
|
||||
return _OptionalValidator(validator)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _InValidator(object):
|
||||
options = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
try:
|
||||
in_options = value in self.options
|
||||
except TypeError: # e.g. `1 in "abc"`
|
||||
in_options = False
|
||||
|
||||
if not in_options:
|
||||
raise ValueError(
|
||||
"'{name}' must be in {options!r} (got {value!r})".format(
|
||||
name=attr.name, options=self.options, value=value
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<in_ validator with options {options!r}>".format(
|
||||
options=self.options
|
||||
)
|
||||
|
||||
|
||||
def in_(options):
|
||||
"""
|
||||
A validator that raises a `ValueError` if the initializer is called
|
||||
with a value that does not belong in the options provided. The check is
|
||||
performed using ``value in options``.
|
||||
|
||||
:param options: Allowed options.
|
||||
:type options: list, tuple, `enum.Enum`, ...
|
||||
|
||||
:raises ValueError: With a human readable error message, the attribute (of
|
||||
type `attrs.Attribute`), the expected options, and the value it
|
||||
got.
|
||||
|
||||
.. versionadded:: 17.1.0
|
||||
"""
|
||||
return _InValidator(options)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=False, hash=True)
|
||||
class _IsCallableValidator(object):
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not callable(value):
|
||||
message = (
|
||||
"'{name}' must be callable "
|
||||
"(got {value!r} that is a {actual!r})."
|
||||
)
|
||||
raise NotCallableError(
|
||||
msg=message.format(
|
||||
name=attr.name, value=value, actual=value.__class__
|
||||
),
|
||||
value=value,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<is_callable validator>"
|
||||
|
||||
|
||||
def is_callable():
|
||||
"""
|
||||
A validator that raises a `attr.exceptions.NotCallableError` if the
|
||||
initializer is called with a value for this particular attribute
|
||||
that is not callable.
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
:raises `attr.exceptions.NotCallableError`: With a human readable error
|
||||
message containing the attribute (`attrs.Attribute`) name,
|
||||
and the value it got.
|
||||
"""
|
||||
return _IsCallableValidator()
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _DeepIterable(object):
|
||||
member_validator = attrib(validator=is_callable())
|
||||
iterable_validator = attrib(
|
||||
default=None, validator=optional(is_callable())
|
||||
)
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if self.iterable_validator is not None:
|
||||
self.iterable_validator(inst, attr, value)
|
||||
|
||||
for member in value:
|
||||
self.member_validator(inst, attr, member)
|
||||
|
||||
def __repr__(self):
|
||||
iterable_identifier = (
|
||||
""
|
||||
if self.iterable_validator is None
|
||||
else " {iterable!r}".format(iterable=self.iterable_validator)
|
||||
)
|
||||
return (
|
||||
"<deep_iterable validator for{iterable_identifier}"
|
||||
" iterables of {member!r}>"
|
||||
).format(
|
||||
iterable_identifier=iterable_identifier,
|
||||
member=self.member_validator,
|
||||
)
|
||||
|
||||
|
||||
def deep_iterable(member_validator, iterable_validator=None):
|
||||
"""
|
||||
A validator that performs deep validation of an iterable.
|
||||
|
||||
:param member_validator: Validator to apply to iterable members
|
||||
:param iterable_validator: Validator to apply to iterable itself
|
||||
(optional)
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
:raises TypeError: if any sub-validators fail
|
||||
"""
|
||||
return _DeepIterable(member_validator, iterable_validator)
|
||||
|
||||
|
||||
@attrs(repr=False, slots=True, hash=True)
|
||||
class _DeepMapping(object):
|
||||
key_validator = attrib(validator=is_callable())
|
||||
value_validator = attrib(validator=is_callable())
|
||||
mapping_validator = attrib(default=None, validator=optional(is_callable()))
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if self.mapping_validator is not None:
|
||||
self.mapping_validator(inst, attr, value)
|
||||
|
||||
for key in value:
|
||||
self.key_validator(inst, attr, key)
|
||||
self.value_validator(inst, attr, value[key])
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<deep_mapping validator for objects mapping {key!r} to {value!r}>"
|
||||
).format(key=self.key_validator, value=self.value_validator)
|
||||
|
||||
|
||||
def deep_mapping(key_validator, value_validator, mapping_validator=None):
|
||||
"""
|
||||
A validator that performs deep validation of a dictionary.
|
||||
|
||||
:param key_validator: Validator to apply to dictionary keys
|
||||
:param value_validator: Validator to apply to dictionary values
|
||||
:param mapping_validator: Validator to apply to top-level mapping
|
||||
attribute (optional)
|
||||
|
||||
.. versionadded:: 19.1.0
|
||||
|
||||
:raises TypeError: if any sub-validators fail
|
||||
"""
|
||||
return _DeepMapping(key_validator, value_validator, mapping_validator)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _NumberValidator(object):
|
||||
bound = attrib()
|
||||
compare_op = attrib()
|
||||
compare_func = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if not self.compare_func(value, self.bound):
|
||||
raise ValueError(
|
||||
"'{name}' must be {op} {bound}: {value}".format(
|
||||
name=attr.name,
|
||||
op=self.compare_op,
|
||||
bound=self.bound,
|
||||
value=value,
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<Validator for x {op} {bound}>".format(
|
||||
op=self.compare_op, bound=self.bound
|
||||
)
|
||||
|
||||
|
||||
def lt(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number larger or equal to *val*.
|
||||
|
||||
:param val: Exclusive upper bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, "<", operator.lt)
|
||||
|
||||
|
||||
def le(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number greater than *val*.
|
||||
|
||||
:param val: Inclusive upper bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, "<=", operator.le)
|
||||
|
||||
|
||||
def ge(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number smaller than *val*.
|
||||
|
||||
:param val: Inclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">=", operator.ge)
|
||||
|
||||
|
||||
def gt(val):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a number smaller or equal to *val*.
|
||||
|
||||
:param val: Exclusive lower bound for values
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _NumberValidator(val, ">", operator.gt)
|
||||
|
||||
|
||||
@attrs(repr=False, frozen=True, slots=True)
|
||||
class _MaxLengthValidator(object):
|
||||
max_length = attrib()
|
||||
|
||||
def __call__(self, inst, attr, value):
|
||||
"""
|
||||
We use a callable class to be able to change the ``__repr__``.
|
||||
"""
|
||||
if len(value) > self.max_length:
|
||||
raise ValueError(
|
||||
"Length of '{name}' must be <= {max}: {len}".format(
|
||||
name=attr.name, max=self.max_length, len=len(value)
|
||||
)
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<max_len validator for {max}>".format(max=self.max_length)
|
||||
|
||||
|
||||
def max_len(length):
|
||||
"""
|
||||
A validator that raises `ValueError` if the initializer is called
|
||||
with a string or iterable that is longer than *length*.
|
||||
|
||||
:param int length: Maximum length of the string or iterable
|
||||
|
||||
.. versionadded:: 21.3.0
|
||||
"""
|
||||
return _MaxLengthValidator(length)
|
||||
|
|
@@ -1,78 +0,0 @@
|
|||
from typing import (
|
||||
Any,
|
||||
AnyStr,
|
||||
Callable,
|
||||
Container,
|
||||
ContextManager,
|
||||
Iterable,
|
||||
List,
|
||||
Mapping,
|
||||
Match,
|
||||
Optional,
|
||||
Pattern,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
from . import _ValidatorType
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_T1 = TypeVar("_T1")
|
||||
_T2 = TypeVar("_T2")
|
||||
_T3 = TypeVar("_T3")
|
||||
_I = TypeVar("_I", bound=Iterable)
|
||||
_K = TypeVar("_K")
|
||||
_V = TypeVar("_V")
|
||||
_M = TypeVar("_M", bound=Mapping)
|
||||
|
||||
def set_disabled(run: bool) -> None: ...
|
||||
def get_disabled() -> bool: ...
|
||||
def disabled() -> ContextManager[None]: ...
|
||||
|
||||
# To be more precise on instance_of use some overloads.
|
||||
# If there are more than 3 items in the tuple then we fall back to Any
|
||||
@overload
|
||||
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: Tuple[Type[_T1], Type[_T2]]
|
||||
) -> _ValidatorType[Union[_T1, _T2]]: ...
|
||||
@overload
|
||||
def instance_of(
|
||||
type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
|
||||
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
|
||||
@overload
|
||||
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
|
||||
def provides(interface: Any) -> _ValidatorType[Any]: ...
|
||||
def optional(
|
||||
validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
|
||||
) -> _ValidatorType[Optional[_T]]: ...
|
||||
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
|
||||
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
||||
def matches_re(
|
||||
regex: Union[Pattern[AnyStr], AnyStr],
|
||||
flags: int = ...,
|
||||
func: Optional[
|
||||
Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
|
||||
] = ...,
|
||||
) -> _ValidatorType[AnyStr]: ...
|
||||
def deep_iterable(
|
||||
member_validator: _ValidatorType[_T],
|
||||
iterable_validator: Optional[_ValidatorType[_I]] = ...,
|
||||
) -> _ValidatorType[_I]: ...
|
||||
def deep_mapping(
|
||||
key_validator: _ValidatorType[_K],
|
||||
value_validator: _ValidatorType[_V],
|
||||
mapping_validator: Optional[_ValidatorType[_M]] = ...,
|
||||
) -> _ValidatorType[_M]: ...
|
||||
def is_callable() -> _ValidatorType[_T]: ...
|
||||
def lt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def le(val: _T) -> _ValidatorType[_T]: ...
|
||||
def ge(val: _T) -> _ValidatorType[_T]: ...
|
||||
def gt(val: _T) -> _ValidatorType[_T]: ...
|
||||
def max_len(length: int) -> _ValidatorType[_T]: ...
|
||||
|
|
@@ -1,70 +0,0 @@
|
|||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
Factory,
|
||||
__author__,
|
||||
__copyright__,
|
||||
__description__,
|
||||
__doc__,
|
||||
__email__,
|
||||
__license__,
|
||||
__title__,
|
||||
__url__,
|
||||
__version__,
|
||||
__version_info__,
|
||||
assoc,
|
||||
cmp_using,
|
||||
define,
|
||||
evolve,
|
||||
field,
|
||||
fields,
|
||||
fields_dict,
|
||||
frozen,
|
||||
has,
|
||||
make_class,
|
||||
mutable,
|
||||
resolve_types,
|
||||
validate,
|
||||
)
|
||||
from attr._next_gen import asdict, astuple
|
||||
|
||||
from . import converters, exceptions, filters, setters, validators
|
||||
|
||||
|
||||
__all__ = [
|
||||
"__author__",
|
||||
"__copyright__",
|
||||
"__description__",
|
||||
"__doc__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__title__",
|
||||
"__url__",
|
||||
"__version__",
|
||||
"__version_info__",
|
||||
"asdict",
|
||||
"assoc",
|
||||
"astuple",
|
||||
"Attribute",
|
||||
"cmp_using",
|
||||
"converters",
|
||||
"define",
|
||||
"evolve",
|
||||
"exceptions",
|
||||
"Factory",
|
||||
"field",
|
||||
"fields_dict",
|
||||
"fields",
|
||||
"filters",
|
||||
"frozen",
|
||||
"has",
|
||||
"make_class",
|
||||
"mutable",
|
||||
"NOTHING",
|
||||
"resolve_types",
|
||||
"setters",
|
||||
"validate",
|
||||
"validators",
|
||||
]
|
||||
|
|
@@ -1,63 +0,0 @@
|
|||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
)
|
||||
|
||||
# Because we need to type our own stuff, we have to make everything from
|
||||
# attr explicitly public too.
|
||||
from attr import __author__ as __author__
|
||||
from attr import __copyright__ as __copyright__
|
||||
from attr import __description__ as __description__
|
||||
from attr import __email__ as __email__
|
||||
from attr import __license__ as __license__
|
||||
from attr import __title__ as __title__
|
||||
from attr import __url__ as __url__
|
||||
from attr import __version__ as __version__
|
||||
from attr import __version_info__ as __version_info__
|
||||
from attr import _FilterType
|
||||
from attr import assoc as assoc
|
||||
from attr import Attribute as Attribute
|
||||
from attr import define as define
|
||||
from attr import evolve as evolve
|
||||
from attr import Factory as Factory
|
||||
from attr import exceptions as exceptions
|
||||
from attr import field as field
|
||||
from attr import fields as fields
|
||||
from attr import fields_dict as fields_dict
|
||||
from attr import frozen as frozen
|
||||
from attr import has as has
|
||||
from attr import make_class as make_class
|
||||
from attr import mutable as mutable
|
||||
from attr import NOTHING as NOTHING
|
||||
from attr import resolve_types as resolve_types
|
||||
from attr import setters as setters
|
||||
from attr import validate as validate
|
||||
from attr import validators as validators
|
||||
|
||||
# TODO: see definition of attr.asdict/astuple
|
||||
def asdict(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
dict_factory: Type[Mapping[Any, Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
value_serializer: Optional[
|
||||
Callable[[type, Attribute[Any], Any], Any]
|
||||
] = ...,
|
||||
tuple_keys: bool = ...,
|
||||
) -> Dict[str, Any]: ...
|
||||
|
||||
# TODO: add support for returning NamedTuple from the mypy plugin
|
||||
def astuple(
|
||||
inst: Any,
|
||||
recurse: bool = ...,
|
||||
filter: Optional[_FilterType[Any]] = ...,
|
||||
tuple_factory: Type[Sequence[Any]] = ...,
|
||||
retain_collection_types: bool = ...,
|
||||
) -> Tuple[Any, ...]: ...
|
||||
|
|
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.converters import * # noqa

@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.exceptions import * # noqa

@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.filters import * # noqa

@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.setters import * # noqa
Some files were not shown because too many files have changed in this diff