"""Functions that help us generate and use info.json files.
|
|
"""
|
|
import json
|
|
from collections.abc import Mapping
|
|
from functools import lru_cache
|
|
from pathlib import Path
|
|
|
|
import hjson
|
|
import jsonschema
|
|
from milc import cli
|
|
|
|
|
|


def json_load(json_file):
    """Load a json file from disk.

    Note: file must be a Path object.
    """
    try:
        return hjson.load(json_file.open(encoding='utf-8'))

    except (json.decoder.JSONDecodeError, hjson.HjsonDecodeError) as e:
        cli.log.error('Invalid JSON encountered attempting to load {fg_cyan}%s{fg_reset}:\n\t{fg_red}%s', json_file, e)
        exit(1)
    except Exception as e:
        cli.log.error('Unknown error attempting to load {fg_cyan}%s{fg_reset}:\n\t{fg_red}%s', json_file, e)
        exit(1)
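

# Usage sketch (the path below is illustrative, not something this module
# depends on):
#
#     from pathlib import Path
#     info = json_load(Path('keyboards/example/info.json'))
#
# Parsing goes through hjson, so relaxed input (comments, trailing commas) is
# accepted alongside strict JSON.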


@lru_cache(maxsize=None)
def load_jsonschema(schema_name):
    """Read a jsonschema file from disk.
    """
    # schema_name may also be a path to an existing file; json_load() expects a Path.
    if Path(schema_name).exists():
        return json_load(Path(schema_name))

    schema_path = Path(f'data/schemas/{schema_name}.jsonschema')

    # Fall back to the false schema when the requested schema doesn't exist.
    if not schema_path.exists():
        schema_path = Path('data/schemas/false.jsonschema')

    return json_load(schema_path)
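

# Usage sketch (the schema name below is an assumption about what lives under
# data/schemas/, shown for illustration only):
#
#     keyboard_schema = load_jsonschema('keyboard')   # data/schemas/keyboard.jsonschema
#
# Names with no matching file fall back to data/schemas/false.jsonschema.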


@lru_cache(maxsize=None)
def compile_schema_store():
    """Compile all our schemas into a schema store.
    """
    schema_store = {}

    for schema_file in Path('data/schemas').glob('*.jsonschema'):
        schema_data = load_jsonschema(schema_file)
        if not isinstance(schema_data, dict):
            cli.log.debug('Skipping schema file %s', schema_file)
            continue
        schema_store[schema_data['$id']] = schema_data

    return schema_store
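

# The store maps each schema's "$id" to its parsed contents, which is what lets
# "$ref" references between the bundled schemas resolve from memory:
#
#     store = compile_schema_store()
#     keyboard_schema = store['qmk.keyboard.v1']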


@lru_cache(maxsize=None)
def create_validator(schema):
    """Creates a validator for the given schema id.
    """
    schema_store = compile_schema_store()
    resolver = jsonschema.RefResolver.from_schema(schema_store['qmk.keyboard.v1'], store=schema_store)

    return jsonschema.Draft7Validator(schema_store[schema], resolver=resolver).validate
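

# Usage sketch: the returned callable follows Draft7Validator.validate semantics,
# returning None on success and raising jsonschema.ValidationError on failure.
# The payload below is a made-up fragment, not a complete info.json:
#
#     validate_keyboard = create_validator('qmk.keyboard.v1')
#     validate_keyboard({'keyboard_name': 'example'})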


def validate(data, schema):
    """Validates data against a schema.
    """
    validator = create_validator(schema)

    return validator(data)
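

# Callers typically wrap this in a try/except, since validation raises on the
# first failing rule (sketch only; info_data stands for a previously loaded
# info.json dict):
#
#     try:
#         validate(info_data, 'qmk.keyboard.v1')
#     except jsonschema.ValidationError as e:
#         cli.log.error('Invalid info.json: %s', e.message)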


def deep_update(origdict, newdict):
    """Update a dictionary in place, recursing to do a depth-first deep merge.
    """
    for key, value in newdict.items():
        if isinstance(value, Mapping):
            origdict[key] = deep_update(origdict.get(key, {}), value)

        else:
            origdict[key] = value

    return origdict
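

# Behavior sketch: nested mappings are merged key by key, every other value is
# simply overwritten (assigned, not copied):
#
#     base = {'split': {'enabled': True, 'main': 'left'}, 'tags': ['a']}
#     deep_update(base, {'split': {'main': 'right'}, 'tags': ['b']})
#     # base == {'split': {'enabled': True, 'main': 'right'}, 'tags': ['b']}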