Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-27 06:12:19 +01:00)
Reorganized the config setting tool so it is usable as a tool
Parent: 57cac7135d
Commit: 1ac12eadb2
24 changed files with 251 additions and 460 deletions
pype/tools/config_setting/__init__.py (new file, 7 lines)

@@ -0,0 +1,7 @@
from config_setting import style, MainWidget


__all__ = (
    "style",
    "MainWidget"
)
pype/tools/config_setting/__main__.py (new file, 18 lines)

@@ -0,0 +1,18 @@
import os
import sys

import config_setting
from Qt import QtWidgets, QtGui


if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)

    stylesheet = config_setting.style.load_stylesheet()
    app.setStyleSheet(stylesheet)
    app.setWindowIcon(QtGui.QIcon(config_setting.style.app_icon_path()))

    widget = config_setting.MainWidget()
    widget.show()

    sys.exit(app.exec_())
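With this __main__.py in place, the tool can be launched by pointing Python at the package directory, for example `python pype/tools/config_setting` (assuming Qt.py, pype and avalon are importable in that environment). Python puts that directory itself on sys.path, which is what lets the bare `import config_setting` above resolve to the nested config_setting package.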
pype/tools/config_setting/config_setting/__init__.py (new file, 10 lines)

@@ -0,0 +1,10 @@
from . import style
# from . import widgets
from .widgets import MainWidget


__all__ = (
    "style",
    # "widgets",
    "MainWidget"
)
Image file (icon): 3.7 KiB before, 3.7 KiB after
pype/tools/config_setting/config_setting/widgets/__init__.py (new file, 19 lines)

@@ -0,0 +1,19 @@
from .lib import (
    NOT_SET,
    AS_WIDGET,
    METADATA_KEY,
    OVERRIDE_VERSION,
    convert_gui_data_to_overrides,
    convert_overrides_to_gui_data,
    TypeToKlass
)


from .base import (
    PypeConfigurationWidget,
    StudioWidget,
    ProjectWidget
)
from .main import MainWidget

from .inputs import *
Modified file (path not shown in this diff view; the hunks touch PypeConfigurationWidget, StudioWidget and ProjectWidget):

@@ -1,16 +1,12 @@
 import os
 import json
 from Qt import QtWidgets, QtCore, QtGui
-from . import config
+from pype.api import config
 from .widgets import UnsavedChangesDialog
-from .lib import NOT_SET, METADATA_KEY, convert_gui_data_to_overrides
+from . import lib
 from avalon import io


-class TypeToKlass:
-    types = {}
-
-
 class PypeConfigurationWidget:
     default_state = ""

@@ -23,7 +19,7 @@ class PypeConfigurationWidget:
 
     def value_from_values(self, values, keys=None):
         if not values:
-            return NOT_SET
+            return lib.NOT_SET
 
         if keys is None:
             keys = self.keys
@@ -36,7 +32,7 @@ class PypeConfigurationWidget:
             )
 
             if key not in value:
-                return NOT_SET
+                return lib.NOT_SET
             value = value[key]
         return value
 
@@ -109,7 +105,7 @@ class StudioWidget(QtWidgets.QWidget, PypeConfigurationWidget):
         self.input_fields.clear()
 
         values = {"studio": config.studio_presets()}
-        schema = config.gui_schema("studio_schema", "studio_gui_schema")
+        schema = lib.gui_schema("studio_schema", "studio_gui_schema")
         self.keys = schema.get("keys", [])
         self.add_children_gui(schema, values)
         self.schema = schema
@@ -129,7 +125,7 @@ class StudioWidget(QtWidgets.QWidget, PypeConfigurationWidget):
         # Load studio data with metadata
         current_presets = config.studio_presets()
 
-        keys_to_file = config.file_keys_from_schema(self.schema)
+        keys_to_file = lib.file_keys_from_schema(self.schema)
         for key_sequence in keys_to_file:
             # Skip first key
             key_sequence = key_sequence[1:]
@@ -158,7 +154,7 @@ class StudioWidget(QtWidgets.QWidget, PypeConfigurationWidget):
 
     def add_children_gui(self, child_configuration, values):
         item_type = child_configuration["type"]
-        klass = TypeToKlass.types.get(item_type)
+        klass = lib.TypeToKlass.types.get(item_type)
         item = klass(
             child_configuration, values, self.keys, self
         )
@@ -354,14 +350,14 @@ class ProjectWidget(QtWidgets.QWidget, PypeConfigurationWidget):
 
     def reset(self):
         values = config.global_project_presets()
-        schema = config.gui_schema("projects_schema", "project_gui_schema")
+        schema = lib.gui_schema("projects_schema", "project_gui_schema")
         self.keys = schema.get("keys", [])
         self.add_children_gui(schema, values)
         self.schema = schema
 
     def add_children_gui(self, child_configuration, values):
         item_type = child_configuration["type"]
-        klass = TypeToKlass.types.get(item_type)
+        klass = lib.TypeToKlass.types.get(item_type)
 
         item = klass(
             child_configuration, values, self.keys, self
@@ -394,7 +390,7 @@ class ProjectWidget(QtWidgets.QWidget, PypeConfigurationWidget):
         _data = {}
         for item in self.input_fields:
             value, is_group = item.overrides()
-            if value is not NOT_SET:
+            if value is not lib.NOT_SET:
                 _data.update(value)
                 if is_group:
                     raise Exception(
@@ -402,7 +398,7 @@ class ProjectWidget(QtWidgets.QWidget, PypeConfigurationWidget):
                     )
 
         data = _data.get("project") or {}
-        output_data = convert_gui_data_to_overrides(data)
+        output_data = lib.convert_gui_data_to_overrides(data)
 
         overrides_json_path = config.path_to_project_overrides(
             self.project_name
@@ -437,7 +433,7 @@ class ProjectWidget(QtWidgets.QWidget, PypeConfigurationWidget):
         # Load studio data with metadata
         current_presets = config.global_project_presets()
 
-        keys_to_file = config.file_keys_from_schema(self.schema)
+        keys_to_file = lib.file_keys_from_schema(self.schema)
         for key_sequence in keys_to_file:
             # Skip first key
             key_sequence = key_sequence[1:]
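For orientation, a rough sketch of the data these lib helpers work with; the schema dict below is hypothetical (loosely shaped after the JSON files that gui_schema() loads from config_gui_schema/), not taken from the repository:

# Hypothetical schema dict, shaped like the config_gui_schema JSON files.
schema = {
    "type": "dict", "key": "studio",
    "children": [
        {"type": "dict", "key": "general", "is_file": True, "children": []},
        {"type": "dict", "key": "colors", "is_file": True, "children": []},
    ],
}

# lib.file_keys_from_schema(schema) collects the key path of every "is_file"
# item, here [["studio", "general"], ["studio", "colors"]]; the save methods
# above drop the first key of each path ("Skip first key") and use the rest
# to decide which preset file a value belongs to.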
Modified file (path not shown in this diff view; the hunk updates the module imports above class SchemeGroupHierarchyBug):

@@ -1,14 +1,14 @@
 import json
 from Qt import QtWidgets, QtCore, QtGui
-from . import config
-from .base import PypeConfigurationWidget, TypeToKlass
+from pype.api import config
+from .base import PypeConfigurationWidget
 from .widgets import (
     ClickableWidget,
     ExpandingWidget,
     ModifiedIntSpinBox,
     ModifiedFloatSpinBox
 )
-from .lib import NOT_SET, AS_WIDGET, METADATA_KEY
+from .lib import NOT_SET, AS_WIDGET, METADATA_KEY, TypeToKlass
 
 
 class SchemeGroupHierarchyBug(Exception):
pype/tools/config_setting/config_setting/widgets/lib.py (new file, 182 lines)

@@ -0,0 +1,182 @@
import os
import json
import copy
from pype.api import config

OVERRIDEN_KEY = config.OVERRIDEN_KEY


# Singleton database of available inputs
class TypeToKlass:
    types = {}


NOT_SET = type("NOT_SET", (), {})
AS_WIDGET = type("AS_WIDGET", (), {})
METADATA_KEY = type("METADATA_KEY", (), {})
OVERRIDE_VERSION = 1


def convert_gui_data_to_overrides(data, first=True):
    if not data or not isinstance(data, dict):
        return data

    output = {}
    if first:
        output["__override_version__"] = OVERRIDE_VERSION

    if METADATA_KEY in data:
        metadata = data.pop(METADATA_KEY)
        for key, value in metadata.items():
            if key == "groups":
                output[OVERRIDEN_KEY] = value
            else:
                KeyError("Unknown metadata key \"{}\"".format(key))

    for key, value in data.items():
        output[key] = convert_gui_data_to_overrides(value, False)
    return output


def convert_overrides_to_gui_data(data, first=True):
    if not data or not isinstance(data, dict):
        return data

    output = {}
    if OVERRIDEN_KEY in data:
        groups = data.pop(OVERRIDEN_KEY)
        if METADATA_KEY not in output:
            output[METADATA_KEY] = {}
        output[METADATA_KEY]["groups"] = groups

    for key, value in data.items():
        output[key] = convert_overrides_to_gui_data(value, False)

    return output



def replace_inner_schemas(schema_data, schema_collection):
    if schema_data["type"] == "schema":
        raise ValueError("First item in schema data can't be schema.")

    children = schema_data.get("children")
    if not children:
        return schema_data

    new_children = []
    for child in children:
        if child["type"] != "schema":
            new_child = replace_inner_schemas(child, schema_collection)
            new_children.append(new_child)
            continue

        for schema_name in child["children"]:
            new_child = replace_inner_schemas(
                schema_collection[schema_name],
                schema_collection
            )
            new_children.append(new_child)

    schema_data["children"] = new_children
    return schema_data


class SchemaMissingFileInfo(Exception):
    def __init__(self, invalid):
        full_path_keys = []
        for item in invalid:
            full_path_keys.append("\"{}\"".format("/".join(item)))

        msg = (
            "Schema has missing definition of output file (\"is_file\" key)"
            " for keys. [{}]"
        ).format(", ".join(full_path_keys))
        super(SchemaMissingFileInfo, self).__init__(msg)


def file_keys_from_schema(schema_data):
    output = []
    keys = []
    key = schema_data.get("key")
    if key:
        keys.append(key)

    for child in schema_data["children"]:
        if child.get("is_file"):
            _keys = copy.deepcopy(keys)
            _keys.append(child["key"])
            output.append(_keys)
            continue

        for result in file_keys_from_schema(child):
            _keys = copy.deepcopy(keys)
            _keys.extend(result)
            output.append(_keys)
    return output


def validate_all_has_ending_file(schema_data, is_top=True):
    if schema_data.get("is_file"):
        return None

    children = schema_data.get("children")
    if not children:
        return [[schema_data["key"]]]

    invalid = []
    keyless = "key" not in schema_data
    for child in children:
        result = validate_all_has_ending_file(child, False)
        if result is None:
            continue

        if keyless:
            invalid.extend(result)
        else:
            for item in result:
                new_invalid = [schema_data["key"]]
                new_invalid.extend(item)
                invalid.append(new_invalid)

    if not invalid:
        return None

    if not is_top:
        return invalid

    raise SchemaMissingFileInfo(invalid)


def validate_schema(schema_data):
    # TODO validator for key uniquenes
    # TODO validator that is_group key is not before is_file child
    # TODO validator that is_group or is_file is not on child without key
    validate_all_has_ending_file(schema_data)


def gui_schema(subfolder, main_schema_name):
    subfolder, main_schema_name
    dirpath = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        "config_gui_schema",
        subfolder
    )

    loaded_schemas = {}
    for filename in os.listdir(dirpath):
        basename, ext = os.path.splitext(filename)
        if ext != ".json":
            continue

        filepath = os.path.join(dirpath, filename)
        with open(filepath, "r") as json_stream:
            schema_data = json.load(json_stream)
        loaded_schemas[basename] = schema_data

    main_schema = replace_inner_schemas(
        loaded_schemas[main_schema_name],
        loaded_schemas
    )
    validate_schema(main_schema)
    return main_schema
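A small usage sketch of the two conversion helpers above, assuming lib is this widgets/lib.py module imported in an environment where pype.api is available (OVERRIDEN_KEY resolves to "__overriden_keys__" in the removed config module shown further down); the values are hypothetical:

overrides = {
    lib.OVERRIDEN_KEY: ["ftrack"],
    "ftrack": {"server": "https://tracker.local"},   # hypothetical values
    "fps": 25,
}

gui_data = lib.convert_overrides_to_gui_data(overrides)
# -> {lib.METADATA_KEY: {"groups": ["ftrack"]},
#     "ftrack": {"server": "https://tracker.local"}, "fps": 25}
# Note: the helper pops OVERRIDEN_KEY out of the dict it is given.

back = lib.convert_gui_data_to_overrides(gui_data)
# -> {"__override_version__": 1, lib.OVERRIDEN_KEY: ["ftrack"],
#     "ftrack": {"server": "https://tracker.local"}, "fps": 25}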
Deleted file (path not shown in this diff view; the previous ad-hoc launcher script):

@@ -1,56 +0,0 @@
import os
import sys


def folder_up(path, times=1):
    if times <= 0:
        return path
    return folder_up(os.path.dirname(path), times - 1)


PYPE_SETUP_PATH = folder_up(os.path.realpath(__file__), 6)
os.environ["PYPE_CONFIG"] = os.path.join(
    PYPE_SETUP_PATH, "repos", "pype-config"
)
os.environ["AVALON_MONGO"] = "mongodb://localhost:2707"
sys_paths = (
    "C:/Users/Public/pype_env2/Lib/site-packages",
    PYPE_SETUP_PATH,
    os.path.join(PYPE_SETUP_PATH, "repos", "pype"),
    os.path.join(PYPE_SETUP_PATH, "repos", "avalon-core"),
    os.path.join(PYPE_SETUP_PATH, "repos", "pyblish-base")
)
for path in sys_paths:
    sys.path.append(path)

from widgets import main
import style
from Qt import QtWidgets, QtGui


class MyApp(QtWidgets.QApplication):
    def __init__(self, *args, **kwargs):
        super(MyApp, self).__init__(*args, **kwargs)
        stylesheet = style.load_stylesheet()
        self.setStyleSheet(stylesheet)
        self.setWindowIcon(QtGui.QIcon(style.app_icon_path()))


if __name__ == "__main__":
    app = MyApp(sys.argv)

    # main_widget = QtWidgets.QWidget()
    # main_widget.setWindowIcon(QtGui.QIcon(style.app_icon_path()))
    #
    # layout = QtWidgets.QVBoxLayout(main_widget)
    #
    # widget = main.MainWidget(main_widget)

    # layout.addWidget(widget)
    # main_widget.setLayout(layout)
    # main_widget.show()

    widget = main.MainWidget()
    widget.show()

    sys.exit(app.exec_())
Deleted file (path not shown in this diff view; the previous widgets/__init__.py):

@@ -1,6 +0,0 @@
from .lib import NOT_SET, AS_WIDGET, METADATA_KEY


from .base import *
from .main import *
from .inputs import *
Deleted file (path not shown in this diff view; the tool's previous local config module: its schema helpers now live in widgets/lib.py, and its preset/override helpers are used via pype.api.config):

@@ -1,325 +0,0 @@
import os
import json
import logging
import copy

# DEBUG SETUP
os.environ["PYPE_CONFIG"] = os.path.dirname(os.path.dirname(__file__))
os.environ["PYPE_PROJECT_CONFIGS"] = os.path.join(
    os.environ["PYPE_CONFIG"], "config", "project_overrides"
)

log = logging.getLogger(__name__)

STUDIO_PRESETS_PATH = os.path.normpath(
    os.path.join(os.environ["PYPE_CONFIG"], "config", "studio_presets")
)
PROJECT_CONFIGURATION_DIR = "project_presets"
PROJECT_PRESETS_PATH = os.path.normpath(os.path.join(
    os.environ["PYPE_CONFIG"], "config", PROJECT_CONFIGURATION_DIR
))
first_run = False

# TODO key popping not implemented yet
POP_KEY = "__pop_key__"
OVERRIDEN_KEY = "__overriden_keys__"


def load_json(fpath):
    # Load json data
    with open(fpath, "r") as opened_file:
        lines = opened_file.read().splitlines()

    # prepare json string
    standard_json = ""
    for line in lines:
        # Remove all whitespace on both sides
        line = line.strip()

        # Skip blank lines
        if len(line) == 0:
            continue

        standard_json += line

    # Check if has extra commas
    extra_comma = False
    if ",]" in standard_json or ",}" in standard_json:
        extra_comma = True
        standard_json = standard_json.replace(",]", "]")
        standard_json = standard_json.replace(",}", "}")

    global first_run
    if extra_comma and first_run:
        log.error("Extra comma in json file: \"{}\"".format(fpath))

    # return empty dict if file is empty
    if standard_json == "":
        if first_run:
            log.error("Empty json file: \"{}\"".format(fpath))
        return {}

    # Try to parse string
    try:
        return json.loads(standard_json)

    except json.decoder.JSONDecodeError:
        # Return empty dict if it is first time that decode error happened
        if not first_run:
            return {}

        # Repreduce the exact same exception but traceback contains better
        # information about position of error in the loaded json
        try:
            with open(fpath, "r") as opened_file:
                json.load(opened_file)

        except json.decoder.JSONDecodeError:
            log.warning(
                "File has invalid json format \"{}\"".format(fpath),
                exc_info=True
            )

    return {}


def subkey_merge(_dict, value, keys):
    key = keys.pop(0)
    if not keys:
        _dict[key] = value
        return _dict

    if key not in _dict:
        _dict[key] = {}
    _dict[key] = subkey_merge(_dict[key], value, keys)

    return _dict


def load_jsons_from_dir(path, *args, **kwargs):
    output = {}

    path = os.path.normpath(path)
    if not os.path.exists(path):
        # TODO warning
        return output

    sub_keys = list(kwargs.pop("subkeys", args))
    for sub_key in tuple(sub_keys):
        _path = os.path.join(path, sub_key)
        if not os.path.exists(_path):
            break

        path = _path
        sub_keys.pop(0)

    base_len = len(path) + 1
    for base, _directories, filenames in os.walk(path):
        base_items_str = base[base_len:]
        if not base_items_str:
            base_items = []
        else:
            base_items = base_items_str.split(os.path.sep)

        for filename in filenames:
            basename, ext = os.path.splitext(filename)
            if ext == ".json":
                full_path = os.path.join(base, filename)
                value = load_json(full_path)
                dict_keys = base_items + [basename]
                output = subkey_merge(output, value, dict_keys)

    for sub_key in sub_keys:
        output = output[sub_key]
    return output


def studio_presets(*args, **kwargs):
    return load_jsons_from_dir(STUDIO_PRESETS_PATH, *args, **kwargs)


def global_project_presets(**kwargs):
    return load_jsons_from_dir(PROJECT_PRESETS_PATH, **kwargs)


def path_to_project_overrides(project_name):
    project_configs_path = os.environ["PYPE_PROJECT_CONFIGS"]
    dirpath = os.path.join(project_configs_path, project_name)
    return os.path.join(dirpath, PROJECT_CONFIGURATION_DIR + ".json")


def project_preset_overrides(project_name, **kwargs):
    if not project_name:
        return {}

    path_to_json = path_to_project_overrides(project_name)
    if not os.path.exists(path_to_json):
        return {}
    return load_json(path_to_json)


def merge_overrides(global_dict, override_dict):
    if OVERRIDEN_KEY in override_dict:
        overriden_keys = set(override_dict.pop(OVERRIDEN_KEY))
    else:
        overriden_keys = set()

    for key, value in override_dict.items():
        if value == POP_KEY:
            global_dict.pop(key)

        elif (
            key in overriden_keys
            or key not in global_dict
        ):
            global_dict[key] = value

        elif isinstance(value, dict) and isinstance(global_dict[key], dict):
            global_dict[key] = merge_overrides(global_dict[key], value)

        else:
            global_dict[key] = value
    return global_dict


def apply_overrides(global_presets, project_overrides):
    global_presets = copy.deepcopy(global_presets)
    if not project_overrides:
        return global_presets
    return merge_overrides(global_presets, project_overrides)


def project_presets(project_name=None, **kwargs):
    global_presets = global_project_presets(**kwargs)

    if not project_name:
        project_name = os.environ.get("AVALON_PROJECT")
    project_overrides = project_preset_overrides(project_name, **kwargs)

    return apply_overrides(global_presets, project_overrides)


def replace_inner_schemas(schema_data, schema_collection):
    if schema_data["type"] == "schema":
        raise ValueError("First item in schema data can't be schema.")

    children = schema_data.get("children")
    if not children:
        return schema_data

    new_children = []
    for child in children:
        if child["type"] != "schema":
            new_child = replace_inner_schemas(child, schema_collection)
            new_children.append(new_child)
            continue

        for schema_name in child["children"]:
            new_child = replace_inner_schemas(
                schema_collection[schema_name],
                schema_collection
            )
            new_children.append(new_child)

    schema_data["children"] = new_children
    return schema_data


class SchemaMissingFileInfo(Exception):
    def __init__(self, invalid):
        full_path_keys = []
        for item in invalid:
            full_path_keys.append("\"{}\"".format("/".join(item)))

        msg = (
            "Schema has missing definition of output file (\"is_file\" key)"
            " for keys. [{}]"
        ).format(", ".join(full_path_keys))
        super(SchemaMissingFileInfo, self).__init__(msg)


def file_keys_from_schema(schema_data):
    output = []
    keys = []
    key = schema_data.get("key")
    if key:
        keys.append(key)

    for child in schema_data["children"]:
        if child.get("is_file"):
            _keys = copy.deepcopy(keys)
            _keys.append(child["key"])
            output.append(_keys)
            continue

        for result in file_keys_from_schema(child):
            _keys = copy.deepcopy(keys)
            _keys.extend(result)
            output.append(_keys)
    return output


def validate_all_has_ending_file(schema_data, is_top=True):
    if schema_data.get("is_file"):
        return None

    children = schema_data.get("children")
    if not children:
        return [[schema_data["key"]]]

    invalid = []
    keyless = "key" not in schema_data
    for child in children:
        result = validate_all_has_ending_file(child, False)
        if result is None:
            continue

        if keyless:
            invalid.extend(result)
        else:
            for item in result:
                new_invalid = [schema_data["key"]]
                new_invalid.extend(item)
                invalid.append(new_invalid)

    if not invalid:
        return None

    if not is_top:
        return invalid

    raise SchemaMissingFileInfo(invalid)


def validate_schema(schema_data):
    # TODO validator for key uniquenes
    # TODO validator that is_group key is not before is_file child
    # TODO validator that is_group or is_file is not on child without key
    validate_all_has_ending_file(schema_data)


def gui_schema(subfolder, main_schema_name):
    subfolder, main_schema_name
    dirpath = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        "config_gui_schema",
        subfolder
    )

    loaded_schemas = {}
    for filename in os.listdir(dirpath):
        basename, ext = os.path.splitext(filename)
        if ext != ".json":
            continue

        filepath = os.path.join(dirpath, filename)
        with open(filepath, "r") as json_stream:
            schema_data = json.load(json_stream)
        loaded_schemas[basename] = schema_data

    main_schema = replace_inner_schemas(
        loaded_schemas[main_schema_name],
        loaded_schemas
    )
    validate_schema(main_schema)
    return main_schema
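A worked sketch of how merge_overrides()/apply_overrides() above combine presets, using hypothetical values; keys listed under "__overriden_keys__" replace the global value wholesale instead of being deep-merged:

global_presets = {
    "ftrack": {"server": "https://a.local", "timeout": 10},   # hypothetical
    "fps": 25,
}
project_overrides = {
    "__overriden_keys__": ["ftrack"],          # OVERRIDEN_KEY
    "ftrack": {"server": "https://b.local"},
}

# apply_overrides(global_presets, project_overrides) deep-copies the globals and
# returns {"ftrack": {"server": "https://b.local"}, "fps": 25}: "ftrack" is in
# the overridden-keys set, so its whole dict is replaced (the "timeout" entry is
# dropped); without that marker the two dicts would be merged recursively,
# giving {"server": "https://b.local", "timeout": 10}.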
Deleted file (path not shown in this diff view; the previous widgets/lib.py):

@@ -1,54 +0,0 @@
from .config import OVERRIDEN_KEY


class CustomNone:
    """Created object can be used as custom None (not equal to None)."""
    def __bool__(self):
        """Return False (like default None)."""
        return False


NOT_SET = CustomNone()
AS_WIDGET = type("AS_WIDGET", (), {})

METADATA_KEY = type("METADATA_KEY", (), {})

OVERRIDE_VERSION = 1


def convert_gui_data_to_overrides(data, first=True):
    if not data or not isinstance(data, dict):
        return data

    output = {}
    if first:
        output["__override_version__"] = OVERRIDE_VERSION

    if METADATA_KEY in data:
        metadata = data.pop(METADATA_KEY)
        for key, value in metadata.items():
            if key == "groups":
                output[OVERRIDEN_KEY] = value
            else:
                KeyError("Unknown metadata key \"{}\"".format(key))

    for key, value in data.items():
        output[key] = convert_gui_data_to_overrides(value, False)
    return output


def convert_overrides_to_gui_data(data, first=True):
    if not data or not isinstance(data, dict):
        return data

    output = {}
    if OVERRIDEN_KEY in data:
        groups = data.pop(OVERRIDEN_KEY)
        if METADATA_KEY not in output:
            output[METADATA_KEY] = {}
        output[METADATA_KEY]["groups"] = groups

    for key, value in data.items():
        output[key] = convert_overrides_to_gui_data(value, False)

    return output
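One behavioral difference worth noting between this removed lib.py and its replacement above: the old NOT_SET was a falsy CustomNone instance, while the new one is a bare class object and therefore truthy, so callers have to test identity rather than truthiness (which is what the updated widgets do with `value is not lib.NOT_SET`). A minimal sketch:

class CustomNone:
    def __bool__(self):
        return False

OLD_NOT_SET = CustomNone()                 # old widgets/lib.py
NEW_NOT_SET = type("NOT_SET", (), {})      # new widgets/lib.py

print(bool(OLD_NOT_SET))  # False -- behaves like None in "if not value" checks
print(bool(NEW_NOT_SET))  # True  -- only identity checks ("value is NOT_SET")
                          # reliably detect the new sentinel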