Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00
Merge pull request #859 from pypeclub/feature/remove_presets_getter
Remove presets getter
Commit 65f9e2c603
3 changed files with 38 additions and 258 deletions
@@ -10,17 +10,13 @@ from .mongo import (
    get_default_components,
    PypeMongoConnection
)
from .anatomy import Anatomy

from .config import (
    get_datetime_data,
    load_json,
    collect_json_from_path,
    get_presets,
    get_init_presets,
    update_dict
from .anatomy import (
    merge_dict,
    Anatomy
)

from .config import get_datetime_data

from .env_tools import (
    env_value_to_bool,
    get_paths_from_environ
@@ -98,9 +94,6 @@ __all__ = [
    "get_latest_version",
    "BuildWorkfile",

    "PypeHook",
    "execute_hook",

    "ApplicationLaunchFailed",
    "ApplictionExecutableNotFound",
    "ApplicationNotFound",
@@ -127,13 +120,12 @@ __all__ = [
    "_subprocess",

    "terminal",

    "merge_dict",
    "Anatomy",

    "get_datetime_data",
    "load_json",
    "collect_json_from_path",
    "get_presets",
    "get_init_presets",
    "update_dict",

    "execute",
    "PypeLogger",
    "decompose_url",
@@ -9,7 +9,6 @@ from pype.settings.lib import (
    get_default_anatomy_settings,
    get_anatomy_settings
)
from . import config
from .log import PypeLogger

log = PypeLogger().get_logger(__name__)
@@ -20,6 +19,32 @@ except NameError:
    StringType = str


def merge_dict(main_dict, enhance_dict):
    """Merges dictionaries by keys.

    The function calls itself if the value under a key is again a dictionary.

    Args:
        main_dict (dict): First dict to merge the second one into.
        enhance_dict (dict): Second dict to be merged.

    Returns:
        dict: Merged result.

    .. note:: Does not override the whole value on the first found key,
        but only the values that differ in enhance_dict.

    """
    for key, value in enhance_dict.items():
        if key not in main_dict:
            main_dict[key] = value
        elif isinstance(value, dict) and isinstance(main_dict[key], dict):
            main_dict[key] = merge_dict(main_dict[key], value)
        else:
            main_dict[key] = value
    return main_dict


class ProjectNotSet(Exception):
    """Exception raised when Anatomy is created without a project name."""
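A minimal, self-contained sketch of how the merge_dict helper added above behaves; the helper body is copied from the hunk (docstring omitted) so the snippet runs on its own, and the sample dictionaries are made up for illustration.

def merge_dict(main_dict, enhance_dict):
    # Copy of the helper introduced in the hunk above (docstring omitted).
    for key, value in enhance_dict.items():
        if key not in main_dict:
            main_dict[key] = value
        elif isinstance(value, dict) and isinstance(main_dict[key], dict):
            main_dict[key] = merge_dict(main_dict[key], value)
        else:
            main_dict[key] = value
    return main_dict

defaults = {"colorspace": {"maya": "ACES", "nuke": "linear"}, "fps": 25}
overrides = {"colorspace": {"nuke": "sRGB"}}

# Nested keys are merged rather than replaced wholesale:
# {'colorspace': {'maya': 'ACES', 'nuke': 'sRGB'}, 'fps': 25}
print(merge_dict(defaults, overrides))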
@@ -395,9 +420,7 @@ class TemplatesDict(dict):
                if key in invalid_types:
                    continue
                _invalid_types[key] = val
            invalid_types = config.update_dict(
                invalid_types, _invalid_types
            )
            invalid_types = merge_dict(invalid_types, _invalid_types)
        return invalid_types

    @property
@@ -405,7 +428,7 @@ class TemplatesDict(dict):
        """Return used values for all children templates."""
        used_values = {}
        for value in self.values():
            used_values = config.update_dict(used_values, value.used_values)
            used_values = merge_dict(used_values, value.used_values)
        return used_values

    def get_solved(self):
@@ -840,7 +863,7 @@ class Templates:

        root_key = "{" + root_key + "}"

        roots_dict = config.update_dict(
        roots_dict = merge_dict(
            roots_dict,
            self._keys_to_dicts(used_root_keys, root_key)
        )
@@ -1,11 +1,6 @@
# -*- coding: utf-8 -*-
"""Get configuration data."""
import os
import json
import datetime
from .log import PypeLogger

log = PypeLogger().get_logger(__name__)


def get_datetime_data(datetime_obj=None):
@@ -79,233 +74,3 @@ def get_datetime_data(datetime_obj=None):
        "S": str(int(seconds)),
        "SS": str(seconds),
    }


def load_json(fpath, first_run=False):
    """Load JSON data.

    Args:
        fpath (str): Path to JSON file.
        first_run (bool): Flag to run checks if file is loaded for the first
            time.
    Returns:
        dict: Parsed JSON object.

    """
    # Load json data
    with open(fpath, "r") as opened_file:
        lines = opened_file.read().splitlines()

    # Prepare json string
    standard_json = ""
    for line in lines:
        # Remove all whitespace on both sides
        line = line.strip()

        # Skip blank lines
        if len(line) == 0:
            continue

        standard_json += line

    # Check if it has extra commas
    extra_comma = False
    if ",]" in standard_json or ",}" in standard_json:
        extra_comma = True
    standard_json = standard_json.replace(",]", "]")
    standard_json = standard_json.replace(",}", "}")

    if extra_comma and first_run:
        log.error("Extra comma in json file: \"{}\"".format(fpath))

    # Return empty dict if file is empty
    if standard_json == "":
        if first_run:
            log.error("Empty json file: \"{}\"".format(fpath))
        return {}

    # Try to parse string
    try:
        return json.loads(standard_json)

    except json.decoder.JSONDecodeError:
        # Return empty dict if this is not the first run
        if not first_run:
            return {}

        # Reproduce the exact same exception, but the traceback contains
        # better information about the position of the error in the loaded json
        try:
            with open(fpath, "r") as opened_file:
                json.load(opened_file)

        except json.decoder.JSONDecodeError:
            log.warning(
                "File has invalid json format \"{}\"".format(fpath),
                exc_info=True
            )

    return {}
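A minimal standalone sketch of the workaround the removed load_json applied to preset files with trailing commas; the sample JSON text below is made up for illustration.

import json

raw = """
{
    "colorspace": "ACES",
    "fps": 25,
}
"""

# json.loads(raw) would raise json.decoder.JSONDecodeError because of the
# trailing comma. load_json recovered by joining the stripped lines and
# dropping ",}" / ",]" sequences before parsing:
standard_json = "".join(line.strip() for line in raw.splitlines() if line.strip())
standard_json = standard_json.replace(",]", "]").replace(",}", "}")

print(json.loads(standard_json))  # {'colorspace': 'ACES', 'fps': 25}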
def collect_json_from_path(input_path, first_run=False):
    """Collect JSON files from a path.

    Iterates through all subfolders and JSON files in `input_path`.

    Args:
        input_path (str): Path from which JSONs will be collected.
        first_run (bool): Flag to run checks if file is loaded for the first
            time.

    Returns:
        dict: Collected JSONs.

    Examples:

        Imagine path::
            `{input_path}/path/to/file.json`

        >>> collect_json_from_path(input_path)
        {'path':
            {'to':
                {'file': {JSON}
                }
            }
        }

    """
    output = None
    if os.path.isdir(input_path):
        output = {}
        for file in os.listdir(input_path):
            full_path = os.path.sep.join([input_path, file])
            if os.path.isdir(full_path):
                loaded = collect_json_from_path(full_path, first_run)
                if loaded:
                    output[file] = loaded
            else:
                basename, ext = os.path.splitext(os.path.basename(file))
                if ext == '.json':
                    output[basename] = load_json(full_path, first_run)
    else:
        basename, ext = os.path.splitext(os.path.basename(input_path))
        if ext == '.json':
            output = load_json(input_path, first_run)

    return output
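A small sketch of the directory layout the removed collect_json_from_path expected and the nested dict it produced; the folder and key names are invented, and the snippet only builds the input tree.

import json
import os
import tempfile

# Recreate the docstring example: {input_path}/path/to/file.json
input_path = tempfile.mkdtemp()
os.makedirs(os.path.join(input_path, "path", "to"))
with open(os.path.join(input_path, "path", "to", "file.json"), "w") as f:
    json.dump({"enabled": True}, f)

# collect_json_from_path(input_path) walked this tree and returned one dict
# nested by folder and file name (extension stripped):
# {'path': {'to': {'file': {'enabled': True}}}}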
def get_presets(project=None, first_run=False):
    """Loads preset files with usage of ``collect_json_from_path``.

    Default preset path is set to: `{PYPE_CONFIG}/presets`
    Project preset path is set to: `{PYPE_PROJECT_CONFIGS}/project_name`

    The environment variable `PYPE_CONFIG` is required; `PYPE_PROJECT_CONFIGS`
    is needed only for per-project overrides.

    Args:
        project (str): Project name.
        first_run (bool): Flag to run checks if file is loaded for the first
            time.

    Returns:
        None: If the default path does not exist.
        default presets (dict): If project_name is not set or
            the project's presets folder does not exist.
        project presets (dict): If project_name is set and includes
            override data.

    """
    # config_path should be set from environments?
    config_path = os.path.normpath(os.environ['PYPE_CONFIG'])
    preset_items = [config_path, 'presets']
    config_path = os.path.sep.join(preset_items)
    if not os.path.isdir(config_path):
        log.error('Preset path was not found: "{}"'.format(config_path))
        return None
    default_data = collect_json_from_path(config_path, first_run)

    if not project:
        project = os.environ.get('AVALON_PROJECT', None)

    if not project:
        return default_data

    project_configs_path = os.environ.get('PYPE_PROJECT_CONFIGS')
    if not project_configs_path:
        return default_data

    project_configs_path = os.path.normpath(project_configs_path)
    project_config_items = [project_configs_path, project, 'presets']
    project_config_path = os.path.sep.join(project_config_items)

    if not os.path.isdir(project_config_path):
        log.warning('Preset path for project {} not found: "{}"'.format(
            project, project_config_path
        ))
        return default_data
    project_data = collect_json_from_path(project_config_path, first_run)

    return update_dict(default_data, project_data)
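A hedged sketch of the environment the removed get_presets relied on; the paths and project name below are hypothetical, and the snippet only shows where the function looked for data.

import os

# Hypothetical studio and project config locations.
os.environ["PYPE_CONFIG"] = "/studio/pype-config"
os.environ["PYPE_PROJECT_CONFIGS"] = "/studio/project-configs"

# get_presets("my_project") would then collect JSON presets from
#   /studio/pype-config/presets                  (studio defaults)
#   /studio/project-configs/my_project/presets   (project overrides)
# and return update_dict(default_data, project_data), i.e. the studio
# defaults deep-merged with the per-project overrides.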
def get_init_presets(project=None):
    """Loads content of presets.

    Like :func:`get_presets` but also evaluates the `init.json`
    pointer to default presets.

    Args:
        project (str): Project name.

    Returns:
        None: If the default path does not exist.
        default presets (dict): If project_name is not set or if the project's
            presets folder does not exist.
        project presets (dict): If project_name is set and includes
            override data.
    """
    presets = get_presets(project)

    try:
        # Check the project's custom directory
        # `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`;
        # init.json defines the preset names to be used
        p_init = presets["init"]
        presets["colorspace"] = presets["colorspace"][p_init["colorspace"]]
        presets["dataflow"] = presets["dataflow"][p_init["dataflow"]]
    except KeyError:
        log.warning("No projects custom preset available...")
        presets["colorspace"] = presets["colorspace"]["default"]
        presets["dataflow"] = presets["dataflow"]["default"]
        log.info(("Presets `colorspace` and `dataflow` "
                  "loaded from `default`..."))

    return presets
def update_dict(main_dict, enhance_dict):
    """Merges dictionaries by keys.

    The function calls itself if the value under a key is again a dictionary.

    Args:
        main_dict (dict): First dict to merge the second one into.
        enhance_dict (dict): Second dict to be merged.

    Returns:
        dict: Merged result.

    .. note:: Does not override the whole value on the first found key,
        but only the values that differ in enhance_dict.

    """
    for key, value in enhance_dict.items():
        if key not in main_dict:
            main_dict[key] = value
        elif isinstance(value, dict) and isinstance(main_dict[key], dict):
            main_dict[key] = update_dict(main_dict[key], value)
        else:
            main_dict[key] = value
    return main_dict