path: root/lib/python/qmk/info.py
author      Nick Brassel <nick@tzarc.org>   2021-02-28 07:22:21 +1100
committer   Nick Brassel <nick@tzarc.org>   2021-02-28 07:22:21 +1100
commit      1a5f6b54aff179732e3f4f4eb79e47454f0a1eb5 (patch)
tree        ebf645f55cb0442899c894765b1af4344fb734db /lib/python/qmk/info.py
parent      804d5c1c5d59d9a12c1d793289ccbd59cb650ec2 (diff)
parent      624359b725c9bfe8176cf72cdc2c8bbb7513949f (diff)
download    qmk_firmware-1a5f6b54aff179732e3f4f4eb79e47454f0a1eb5.tar.gz
            qmk_firmware-1a5f6b54aff179732e3f4f4eb79e47454f0a1eb5.zip
2021 February 27 Breaking Changes Update (#12040)
Diffstat (limited to 'lib/python/qmk/info.py')
-rw-r--r--   lib/python/qmk/info.py   487
1 file changed, 400 insertions(+), 87 deletions(-)
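
At a high level, this commit replaces much of the hand-maintained extraction logic in info.py with two data-driven pieces: jsonschema validation of the generated keyboard data, and declarative config.h/rules.mk-to-info.json mappings applied through dotty_dict. The sketch below illustrates the mapping idea only; the INFO_CONFIG_MAP entries and the apply_mapping() helper are hypothetical stand-ins, not the actual contents of data/mappings/info_config.json.

from dotty_dict import dotty

# Hypothetical mapping entries in the style used by the new code
# (info_key / value_type); the real mapping file may differ.
INFO_CONFIG_MAP = {
    'VENDOR_ID': {'info_key': 'usb.vid', 'value_type': 'hex'},
    'MATRIX_ROW_PINS': {'info_key': 'matrix_pins.rows', 'value_type': 'array'},
}

def apply_mapping(info_data, config_c):
    """Copy config.h values into info_data using dotted info.json keys."""
    dotty_info = dotty(info_data)

    for config_key, info_dict in INFO_CONFIG_MAP.items():
        if config_key not in config_c:
            continue

        info_key = info_dict['info_key']
        key_type = info_dict.get('value_type', 'str')

        if key_type == 'hex':
            # Normalize 0x1234-style values to upper-case hex strings
            dotty_info[info_key] = '0x' + config_c[config_key][2:].upper()
        elif key_type == 'array':
            # Strip the C braces and split into a list of pin names
            value = config_c[config_key].replace('{', '').replace('}', '').strip()
            dotty_info[info_key] = [pin.strip() for pin in value.split(',')]
        else:
            dotty_info[info_key] = config_c[config_key]

    return dotty_info.to_dict()

print(apply_mapping({}, {'VENDOR_ID': '0xfeed', 'MATRIX_ROW_PINS': '{ D0, D5 }'}))
# {'usb': {'vid': '0xFEED'}, 'matrix_pins': {'rows': ['D0', 'D5']}}
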
diff --git a/lib/python/qmk/info.py b/lib/python/qmk/info.py
index f476dc666d..cf5dc6640b 100644
--- a/lib/python/qmk/info.py
+++ b/lib/python/qmk/info.py
@@ -1,9 +1,13 @@
"""Functions that help us generate and use info.json files.
"""
import json
+from collections.abc import Mapping
from glob import glob
from pathlib import Path
+import hjson
+import jsonschema
+from dotty_dict import dotty
from milc import cli
from qmk.constants import CHIBIOS_PROCESSORS, LUFA_PROCESSORS, VUSB_PROCESSORS
@@ -13,6 +17,9 @@ from qmk.keymap import list_keymaps
from qmk.makefile import parse_rules_mk_file
from qmk.math import compute
+true_values = ['1', 'on', 'yes']
+false_values = ['0', 'off', 'no']
+
def info_json(keyboard):
"""Generate the info.json data for a specific keyboard.
@@ -38,8 +45,14 @@ def info_json(keyboard):
info_data['keymaps'][keymap.name] = {'url': f'https://raw.githubusercontent.com/qmk/qmk_firmware/master/{keymap}/keymap.json'}
# Populate layout data
- for layout_name, layout_json in _find_all_layouts(info_data, keyboard, rules).items():
+ layouts, aliases = _find_all_layouts(info_data, keyboard)
+
+ if aliases:
+ info_data['layout_aliases'] = aliases
+
+ for layout_name, layout_json in layouts.items():
if not layout_name.startswith('LAYOUT_kc'):
+ layout_json['c_macro'] = True
info_data['layouts'][layout_name] = layout_json
# Merge in the data from info.json, config.h, and rules.mk
@@ -47,54 +60,259 @@ def info_json(keyboard):
info_data = _extract_config_h(info_data)
info_data = _extract_rules_mk(info_data)
+ # Validate against the jsonschema
+ try:
+ keyboard_api_validate(info_data)
+
+ except jsonschema.ValidationError as e:
+ json_path = '.'.join([str(p) for p in e.absolute_path])
+ cli.log.error('Invalid API data: %s: %s: %s', keyboard, json_path, e.message)
+ exit()
+
+ # Make sure we have at least one layout
+ if not info_data.get('layouts'):
+ _log_error(info_data, 'No LAYOUTs defined! Need at least one layout defined in the keyboard.h or info.json.')
+
+ # Make sure we supply layout macros for the community layouts we claim to support
+ # FIXME(skullydazed): This should be populated into info.json and read from there instead
+ if 'LAYOUTS' in rules and info_data.get('layouts'):
+ # Match these up against the supplied layouts
+ supported_layouts = rules['LAYOUTS'].strip().split()
+ for layout_name in sorted(info_data['layouts']):
+ layout_name = layout_name[7:]
+
+ if layout_name in supported_layouts:
+ supported_layouts.remove(layout_name)
+
+ if supported_layouts:
+ for supported_layout in supported_layouts:
+ _log_error(info_data, 'Claims to support community layout %s but no LAYOUT_%s() macro found' % (supported_layout, supported_layout))
+
return info_data
-def _extract_config_h(info_data):
- """Pull some keyboard information from existing rules.mk files
+def _json_load(json_file):
+ """Load a json file from disk.
+
+ Note: file must be a Path object.
+ """
+ try:
+ return hjson.load(json_file.open(encoding='utf-8'))
+
+ except json.decoder.JSONDecodeError as e:
+ cli.log.error('Invalid JSON encountered attempting to load {fg_cyan}%s{fg_reset}:\n\t{fg_red}%s', json_file, e)
+ exit(1)
+
+
+def _jsonschema(schema_name):
+ """Read a jsonschema file from disk.
+
+ FIXME(skullydazed/anyone): Refactor to make this a public function.
+ """
+ schema_path = Path(f'data/schemas/{schema_name}.jsonschema')
+
+ if not schema_path.exists():
+ schema_path = Path('data/schemas/false.jsonschema')
+
+ return _json_load(schema_path)
+
+
+def keyboard_validate(data):
+ """Validates data against the keyboard jsonschema.
+ """
+ schema = _jsonschema('keyboard')
+ validator = jsonschema.Draft7Validator(schema).validate
+
+ return validator(data)
+
+
+def keyboard_api_validate(data):
+ """Validates data against the api_keyboard jsonschema.
+ """
+ base = _jsonschema('keyboard')
+ relative = _jsonschema('api_keyboard')
+ resolver = jsonschema.RefResolver.from_schema(base)
+ validator = jsonschema.Draft7Validator(relative, resolver=resolver).validate
+
+ return validator(data)
+
+
+def _extract_features(info_data, rules):
+ """Find all the features enabled in rules.mk.
+ """
+ # Special handling for bootmagic which also supports a "lite" mode.
+ if rules.get('BOOTMAGIC_ENABLE') == 'lite':
+ rules['BOOTMAGIC_LITE_ENABLE'] = 'on'
+ del rules['BOOTMAGIC_ENABLE']
+ if rules.get('BOOTMAGIC_ENABLE') == 'full':
+ rules['BOOTMAGIC_ENABLE'] = 'on'
+
+ # Skip non-boolean features we haven't implemented special handling for
+ for feature in 'HAPTIC_ENABLE', 'QWIIC_ENABLE':
+ if rules.get(feature):
+ del rules[feature]
+
+ # Process the rest of the rules as booleans
+ for key, value in rules.items():
+ if key.endswith('_ENABLE'):
+ key = '_'.join(key.split('_')[:-1]).lower()
+ value = True if value.lower() in true_values else False if value.lower() in false_values else value
+
+ if 'config_h_features' not in info_data:
+ info_data['config_h_features'] = {}
+
+ if 'features' not in info_data:
+ info_data['features'] = {}
+
+ if key in info_data['features']:
+ _log_warning(info_data, 'Feature %s is specified in both info.json and rules.mk, the rules.mk value wins.' % (key,))
+
+ info_data['features'][key] = value
+ info_data['config_h_features'][key] = value
+
+ return info_data
+
+
+def _pin_name(pin):
+ """Returns the proper representation for a pin.
+ """
+ pin = pin.strip()
+
+ if not pin:
+ return None
+
+ elif pin.isdigit():
+ return int(pin)
+
+ elif pin == 'NO_PIN':
+ return None
+
+ elif pin[0] in 'ABCDEFGHIJK' and pin[1].isdigit():
+ return pin
+
+ raise ValueError(f'Invalid pin: {pin}')
+
+
+def _extract_pins(pins):
+ """Returns a list of pins from a comma separated string of pins.
+ """
+ return [_pin_name(pin) for pin in pins.split(',')]
+
+
+def _extract_direct_matrix(info_data, direct_pins):
+ """
+ """
+ info_data['matrix_pins'] = {}
+ direct_pin_array = []
+
+ while direct_pins[-1] != '}':
+ direct_pins = direct_pins[:-1]
+
+ for row in direct_pins.split('},{'):
+ if row.startswith('{'):
+ row = row[1:]
+
+ if row.endswith('}'):
+ row = row[:-1]
+
+ direct_pin_array.append([])
+
+ for pin in row.split(','):
+ if pin == 'NO_PIN':
+ pin = None
+
+ direct_pin_array[-1].append(pin)
+
+ return direct_pin_array
+
+
+def _extract_matrix_info(info_data, config_c):
+ """Populate the matrix information.
"""
- config_c = config_h(info_data['keyboard_folder'])
row_pins = config_c.get('MATRIX_ROW_PINS', '').replace('{', '').replace('}', '').strip()
col_pins = config_c.get('MATRIX_COL_PINS', '').replace('{', '').replace('}', '').strip()
direct_pins = config_c.get('DIRECT_PINS', '').replace(' ', '')[1:-1]
- info_data['diode_direction'] = config_c.get('DIODE_DIRECTION')
- info_data['matrix_size'] = {
- 'rows': compute(config_c.get('MATRIX_ROWS', '0')),
- 'cols': compute(config_c.get('MATRIX_COLS', '0')),
- }
- info_data['matrix_pins'] = {}
+ if 'MATRIX_ROWS' in config_c and 'MATRIX_COLS' in config_c:
+ if 'matrix_size' in info_data:
+ _log_warning(info_data, 'Matrix size is specified in both info.json and config.h, the config.h values win.')
+
+ info_data['matrix_size'] = {
+ 'cols': compute(config_c.get('MATRIX_COLS', '0')),
+ 'rows': compute(config_c.get('MATRIX_ROWS', '0')),
+ }
- if row_pins:
- info_data['matrix_pins']['rows'] = row_pins.split(',')
- if col_pins:
- info_data['matrix_pins']['cols'] = col_pins.split(',')
+ if row_pins and col_pins:
+ if 'matrix_pins' in info_data:
+ _log_warning(info_data, 'Matrix pins are specified in both info.json and config.h, the config.h values win.')
+
+ info_data['matrix_pins'] = {
+ 'cols': _extract_pins(col_pins),
+ 'rows': _extract_pins(row_pins),
+ }
if direct_pins:
- direct_pin_array = []
- for row in direct_pins.split('},{'):
- if row.startswith('{'):
- row = row[1:]
- if row.endswith('}'):
- row = row[:-1]
+ if 'matrix_pins' in info_data:
+ _log_warning(info_data, 'Direct pins are specified in both info.json and config.h, the config.h values win.')
+
+ info_data['matrix_pins']['direct'] = _extract_direct_matrix(info_data, direct_pins)
+
+ return info_data
+
- direct_pin_array.append([])
+def _extract_config_h(info_data):
+ """Pull some keyboard information from existing config.h files
+ """
+ config_c = config_h(info_data['keyboard_folder'])
- for pin in row.split(','):
- if pin == 'NO_PIN':
- pin = None
+ # Pull in data from the json map
+ dotty_info = dotty(info_data)
+ info_config_map = _json_load(Path('data/mappings/info_config.json'))
- direct_pin_array[-1].append(pin)
+ for config_key, info_dict in info_config_map.items():
+ info_key = info_dict['info_key']
+ key_type = info_dict.get('value_type', 'str')
- info_data['matrix_pins']['direct'] = direct_pin_array
+ try:
+ if config_key in config_c and info_dict.get('to_json', True):
+ if dotty_info.get(info_key) and info_dict.get('warn_duplicate', True):
+ _log_warning(info_data, '%s in config.h is overwriting %s in info.json' % (config_key, info_key))
- info_data['usb'] = {
- 'vid': config_c.get('VENDOR_ID'),
- 'pid': config_c.get('PRODUCT_ID'),
- 'device_ver': config_c.get('DEVICE_VER'),
- 'manufacturer': config_c.get('MANUFACTURER'),
- 'product': config_c.get('PRODUCT'),
- }
+ if key_type.startswith('array'):
+ if '.' in key_type:
+ key_type, array_type = key_type.split('.', 1)
+ else:
+ array_type = None
+
+ config_value = config_c[config_key].replace('{', '').replace('}', '').strip()
+
+ if array_type == 'int':
+ dotty_info[info_key] = list(map(int, config_value.split(',')))
+ else:
+ dotty_info[info_key] = config_value.split(',')
+
+ elif key_type == 'bool':
+ dotty_info[info_key] = config_c[config_key] in true_values
+
+ elif key_type == 'hex':
+ dotty_info[info_key] = '0x' + config_c[config_key][2:].upper()
+
+ elif key_type == 'list':
+ dotty_info[info_key] = config_c[config_key].split()
+
+ elif key_type == 'int':
+ dotty_info[info_key] = int(config_c[config_key])
+
+ else:
+ dotty_info[info_key] = config_c[config_key]
+
+ except Exception as e:
+ _log_warning(info_data, f'{config_key}->{info_key}: {e}')
+
+ info_data.update(dotty_info)
+
+ # Pull data that easily can't be mapped in json
+ _extract_matrix_info(info_data, config_c)
return info_data
@@ -103,63 +321,143 @@ def _extract_rules_mk(info_data):
"""Pull some keyboard information from existing rules.mk files
"""
rules = rules_mk(info_data['keyboard_folder'])
- mcu = rules.get('MCU')
+ info_data['processor'] = rules.get('MCU', info_data.get('processor', 'atmega32u4'))
+
+ if info_data['processor'] in CHIBIOS_PROCESSORS:
+ arm_processor_rules(info_data, rules)
+
+ elif info_data['processor'] in LUFA_PROCESSORS + VUSB_PROCESSORS:
+ avr_processor_rules(info_data, rules)
+
+ else:
+ cli.log.warning("%s: Unknown MCU: %s" % (info_data['keyboard_folder'], info_data['processor']))
+ unknown_processor_rules(info_data, rules)
+
+ # Pull in data from the json map
+ dotty_info = dotty(info_data)
+ info_rules_map = _json_load(Path('data/mappings/info_rules.json'))
+
+ for rules_key, info_dict in info_rules_map.items():
+ info_key = info_dict['info_key']
+ key_type = info_dict.get('value_type', 'str')
+
+ try:
+ if rules_key in rules and info_dict.get('to_json', True):
+ if dotty_info.get(info_key) and info_dict.get('warn_duplicate', True):
+ _log_warning(info_data, '%s in rules.mk is overwriting %s in info.json' % (rules_key, info_key))
+
+ if key_type.startswith('array'):
+ if '.' in key_type:
+ key_type, array_type = key_type.split('.', 1)
+ else:
+ array_type = None
+
+ rules_value = rules[rules_key].replace('{', '').replace('}', '').strip()
+
+ if array_type == 'int':
+ dotty_info[info_key] = list(map(int, rules_value.split(',')))
+ else:
+ dotty_info[info_key] = rules_value.split(',')
+
+ elif key_type == 'list':
+ dotty_info[info_key] = rules[rules_key].split()
+
+ elif key_type == 'bool':
+ dotty_info[info_key] = rules[rules_key] in true_values
- if mcu in CHIBIOS_PROCESSORS:
- return arm_processor_rules(info_data, rules)
+ elif key_type == 'hex':
+ dotty_info[info_key] = '0x' + rules[rules_key][2:].upper()
- elif mcu in LUFA_PROCESSORS + VUSB_PROCESSORS:
- return avr_processor_rules(info_data, rules)
+ elif key_type == 'int':
+ dotty_info[info_key] = int(rules[rules_key])
- msg = "Unknown MCU: " + str(mcu)
+ else:
+ dotty_info[info_key] = rules[rules_key]
- _log_warning(info_data, msg)
+ except Exception as e:
+ _log_warning(info_data, f'{rules_key}->{info_key}: {e}')
+
+ info_data.update(dotty_info)
+
+ # Merge in config values that can't be easily mapped
+ _extract_features(info_data, rules)
+
+ return info_data
- return unknown_processor_rules(info_data, rules)
+
+def _merge_layouts(info_data, new_info_data):
+ """Merge new_info_data into info_data in an intelligent way.
+ """
+ for layout_name, layout_json in new_info_data['layouts'].items():
+ if layout_name in info_data['layouts']:
+ # Pull in layouts we have a macro for
+ if len(info_data['layouts'][layout_name]['layout']) != len(layout_json['layout']):
+ msg = '%s: %s: Number of elements in info.json does not match! info.json:%s != %s:%s'
+ _log_error(info_data, msg % (info_data['keyboard_folder'], layout_name, len(layout_json['layout']), layout_name, len(info_data['layouts'][layout_name]['layout'])))
+ else:
+ for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
+ key.update(layout_json['layout'][i])
+ else:
+ # Pull in layouts that have matrix data
+ missing_matrix = False
+ for key in layout_json.get('layout', {}):
+ if 'matrix' not in key:
+ missing_matrix = True
+
+ if not missing_matrix:
+ if layout_name in info_data['layouts']:
+ # Update an existing layout with new data
+ for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
+ key.update(layout_json['layout'][i])
+
+ else:
+ # Copy in the new layout wholesale
+ layout_json['c_macro'] = False
+ info_data['layouts'][layout_name] = layout_json
+
+ return info_data
def _search_keyboard_h(path):
current_path = Path('keyboards/')
+ aliases = {}
layouts = {}
+
for directory in path.parts:
current_path = current_path / directory
keyboard_h = '%s.h' % (directory,)
keyboard_h_path = current_path / keyboard_h
if keyboard_h_path.exists():
- layouts.update(find_layouts(keyboard_h_path))
+ new_layouts, new_aliases = find_layouts(keyboard_h_path)
+ layouts.update(new_layouts)
+
+ for alias, alias_text in new_aliases.items():
+ if alias_text in layouts:
+ aliases[alias] = alias_text
- return layouts
+ return layouts, aliases
-def _find_all_layouts(info_data, keyboard, rules):
+def _find_all_layouts(info_data, keyboard):
"""Looks for layout macros associated with this keyboard.
"""
- layouts = _search_keyboard_h(Path(keyboard))
+ layouts, aliases = _search_keyboard_h(Path(keyboard))
if not layouts:
- # If we didn't find any layouts above we widen our search. This is error
- # prone which is why we want to encourage people to follow the standard above.
- _log_warning(info_data, 'Falling back to searching for KEYMAP/LAYOUT macros.')
+ # If we don't find any layouts from info.json or keyboard.h we widen our search. This is error prone which is why we want to encourage people to follow the standard above.
+ info_data['parse_warnings'].append('%s: Falling back to searching for KEYMAP/LAYOUT macros.' % (keyboard))
+
for file in glob('keyboards/%s/*.h' % keyboard):
if file.endswith('.h'):
- these_layouts = find_layouts(file)
+ these_layouts, these_aliases = find_layouts(file)
+
if these_layouts:
layouts.update(these_layouts)
- if 'LAYOUTS' in rules:
- # Match these up against the supplied layouts
- supported_layouts = rules['LAYOUTS'].strip().split()
- for layout_name in sorted(layouts):
- if not layout_name.startswith('LAYOUT_'):
- continue
- layout_name = layout_name[7:]
- if layout_name in supported_layouts:
- supported_layouts.remove(layout_name)
-
- if supported_layouts:
- _log_error(info_data, 'Missing LAYOUT() macro for %s' % (', '.join(supported_layouts)))
+ if these_aliases:
+ aliases.update(these_aliases)
- return layouts
+ return layouts, aliases
def _log_error(info_data, message):
@@ -180,13 +478,13 @@ def arm_processor_rules(info_data, rules):
"""Setup the default info for an ARM board.
"""
info_data['processor_type'] = 'arm'
- info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'unknown'
- info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
info_data['protocol'] = 'ChibiOS'
- if info_data['bootloader'] == 'unknown':
+ if 'bootloader' not in info_data:
if 'STM32' in info_data['processor']:
info_data['bootloader'] = 'stm32-dfu'
+ else:
+ info_data['bootloader'] = 'unknown'
if 'STM32' in info_data['processor']:
info_data['platform'] = 'STM32'
@@ -202,11 +500,12 @@ def avr_processor_rules(info_data, rules):
"""Setup the default info for an AVR board.
"""
info_data['processor_type'] = 'avr'
- info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'atmel-dfu'
info_data['platform'] = rules['ARCH'] if 'ARCH' in rules else 'unknown'
- info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
info_data['protocol'] = 'V-USB' if rules.get('MCU') in VUSB_PROCESSORS else 'LUFA'
+ if 'bootloader' not in info_data:
+ info_data['bootloader'] = 'atmel-dfu'
+
# FIXME(fauxpark/anyone): Eventually we should detect the protocol by looking at PROTOCOL inherited from mcu_selection.mk:
# info_data['protocol'] = 'V-USB' if rules.get('PROTOCOL') == 'VUSB' else 'LUFA'
@@ -225,38 +524,52 @@ def unknown_processor_rules(info_data, rules):
return info_data
+def deep_update(origdict, newdict):
+ """Update a dictionary in place, recursing to do a deep copy.
+ """
+ for key, value in newdict.items():
+ if isinstance(value, Mapping):
+ origdict[key] = deep_update(origdict.get(key, {}), value)
+
+ else:
+ origdict[key] = value
+
+ return origdict
+
+
def merge_info_jsons(keyboard, info_data):
"""Return a merged copy of all the info.json files for a keyboard.
"""
for info_file in find_info_json(keyboard):
# Load and validate the JSON data
- try:
- with info_file.open('r') as info_fd:
- new_info_data = json.load(info_fd)
- except Exception as e:
- _log_error(info_data, "Invalid JSON in file %s: %s: %s" % (str(info_file), e.__class__.__name__, e))
- continue
+ new_info_data = _json_load(info_file)
if not isinstance(new_info_data, dict):
_log_error(info_data, "Invalid file %s, root object should be a dictionary." % (str(info_file),))
continue
- # Copy whitelisted keys into `info_data`
- for key in ('keyboard_name', 'manufacturer', 'identifier', 'url', 'maintainer', 'processor', 'bootloader', 'width', 'height'):
- if key in new_info_data:
- info_data[key] = new_info_data[key]
+ try:
+ keyboard_validate(new_info_data)
+ except jsonschema.ValidationError as e:
+ json_path = '.'.join([str(p) for p in e.absolute_path])
+ cli.log.error('Not including data from file: %s', info_file)
+ cli.log.error('\t%s: %s', json_path, e.message)
+ continue
- # Merge the layouts in
+ # Merge layout data in
+ for layout_name, layout in new_info_data.get('layouts', {}).items():
+ if layout_name in info_data['layouts']:
+ for new_key, existing_key in zip(layout['layout'], info_data['layouts'][layout_name]['layout']):
+ existing_key.update(new_key)
+ else:
+ layout['c_macro'] = False
+ info_data['layouts'][layout_name] = layout
+
+ # Update info_data with the new data
if 'layouts' in new_info_data:
- for layout_name, json_layout in new_info_data['layouts'].items():
- # Only pull in layouts we have a macro for
- if layout_name in info_data['layouts']:
- if info_data['layouts'][layout_name]['key_count'] != len(json_layout['layout']):
- msg = '%s: Number of elements in info.json does not match! info.json:%s != %s:%s'
- _log_error(info_data, msg % (layout_name, len(json_layout['layout']), layout_name, len(info_data['layouts'][layout_name]['layout'])))
- else:
- for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
- key.update(json_layout['layout'][i])
+ del (new_info_data['layouts'])
+
+ deep_update(info_data, new_info_data)
return info_data
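
For reference, the new deep_update() helper introduced above merges nested dictionaries recursively, which is what lets several info.json fragments layer on top of each other without wiping out whole sub-dictionaries. A small usage sketch, with made-up sample data, assuming the function exactly as defined in this commit:

from collections.abc import Mapping

def deep_update(origdict, newdict):
    """Update a dictionary in place, recursing to do a deep copy."""
    for key, value in newdict.items():
        if isinstance(value, Mapping):
            # Recurse into nested mappings so sibling keys survive the merge
            origdict[key] = deep_update(origdict.get(key, {}), value)
        else:
            origdict[key] = value

    return origdict

base = {'usb': {'vid': '0xFEED', 'pid': '0x0000'}, 'features': {'rgblight': True}}
overlay = {'usb': {'pid': '0x6060'}, 'keyboard_name': 'example_board'}

deep_update(base, overlay)
print(base['usb'])            # {'vid': '0xFEED', 'pid': '0x6060'} -- nested keys merge, not replace
print(base['keyboard_name'])  # example_board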