Diffstat (limited to 'lib/python/qmk/info.py')
 lib/python/qmk/info.py | 487
 1 file changed, 400 insertions(+), 87 deletions(-)
diff --git a/lib/python/qmk/info.py b/lib/python/qmk/info.py
index f476dc666..cf5dc6640 100644
--- a/lib/python/qmk/info.py
+++ b/lib/python/qmk/info.py
@@ -1,9 +1,13 @@
 """Functions that help us generate and use info.json files.
 """
 import json
+from collections.abc import Mapping
 from glob import glob
 from pathlib import Path
 
+import hjson
+import jsonschema
+from dotty_dict import dotty
 from milc import cli
 
 from qmk.constants import CHIBIOS_PROCESSORS, LUFA_PROCESSORS, VUSB_PROCESSORS
@@ -13,6 +17,9 @@ from qmk.keymap import list_keymaps
 from qmk.makefile import parse_rules_mk_file
 from qmk.math import compute
 
+true_values = ['1', 'on', 'yes']
+false_values = ['0', 'off', 'no']
+
 
 def info_json(keyboard):
     """Generate the info.json data for a specific keyboard.
@@ -38,8 +45,14 @@ def info_json(keyboard):
         info_data['keymaps'][keymap.name] = {'url': f'https://raw.githubusercontent.com/qmk/qmk_firmware/master/{keymap}/keymap.json'}
 
     # Populate layout data
-    for layout_name, layout_json in _find_all_layouts(info_data, keyboard, rules).items():
+    layouts, aliases = _find_all_layouts(info_data, keyboard)
+
+    if aliases:
+        info_data['layout_aliases'] = aliases
+
+    for layout_name, layout_json in layouts.items():
         if not layout_name.startswith('LAYOUT_kc'):
+            layout_json['c_macro'] = True
             info_data['layouts'][layout_name] = layout_json
 
     # Merge in the data from info.json, config.h, and rules.mk
@@ -47,54 +60,259 @@ def info_json(keyboard):
     info_data = _extract_config_h(info_data)
     info_data = _extract_rules_mk(info_data)
 
+    # Validate against the jsonschema
+    try:
+        keyboard_api_validate(info_data)
+
+    except jsonschema.ValidationError as e:
+        json_path = '.'.join([str(p) for p in e.absolute_path])
+        cli.log.error('Invalid API data: %s: %s: %s', keyboard, json_path, e.message)
+        exit()
+
+    # Make sure we have at least one layout
+    if not info_data.get('layouts'):
+        _log_error(info_data, 'No LAYOUTs defined! Need at least one layout defined in the keyboard.h or info.json.')
+
+    # Make sure we supply layout macros for the community layouts we claim to support
+    # FIXME(skullydazed): This should be populated into info.json and read from there instead
+    if 'LAYOUTS' in rules and info_data.get('layouts'):
+        # Match these up against the supplied layouts
+        supported_layouts = rules['LAYOUTS'].strip().split()
+        for layout_name in sorted(info_data['layouts']):
+            layout_name = layout_name[7:]
+
+            if layout_name in supported_layouts:
+                supported_layouts.remove(layout_name)
+
+        if supported_layouts:
+            for supported_layout in supported_layouts:
+                _log_error(info_data, 'Claims to support community layout %s but no LAYOUT_%s() macro found' % (supported_layout, supported_layout))
+
     return info_data
 
 
-def _extract_config_h(info_data):
-    """Pull some keyboard information from existing rules.mk files
+def _json_load(json_file):
+    """Load a json file from disk.
+
+    Note: file must be a Path object.
+    """
+    try:
+        return hjson.load(json_file.open(encoding='utf-8'))
+
+    except json.decoder.JSONDecodeError as e:
+        cli.log.error('Invalid JSON encountered attempting to load {fg_cyan}%s{fg_reset}:\n\t{fg_red}%s', json_file, e)
+        exit(1)
+
+
+def _jsonschema(schema_name):
+    """Read a jsonschema file from disk.
+
+    FIXME(skullydazed/anyone): Refactor to make this a public function.
+    """
+    schema_path = Path(f'data/schemas/{schema_name}.jsonschema')
+
+    if not schema_path.exists():
+        schema_path = Path('data/schemas/false.jsonschema')
+
+    return _json_load(schema_path)
+
+
+def keyboard_validate(data):
+    """Validates data against the keyboard jsonschema.
+    """
+    schema = _jsonschema('keyboard')
+    validator = jsonschema.Draft7Validator(schema).validate
+
+    return validator(data)
+
+
+def keyboard_api_validate(data):
+    """Validates data against the api_keyboard jsonschema.
+    """
+    base = _jsonschema('keyboard')
+    relative = _jsonschema('api_keyboard')
+    resolver = jsonschema.RefResolver.from_schema(base)
+    validator = jsonschema.Draft7Validator(relative, resolver=resolver).validate
+
+    return validator(data)
+
+
+def _extract_features(info_data, rules):
+    """Find all the features enabled in rules.mk.
+    """
+    # Special handling for bootmagic which also supports a "lite" mode.
+    if rules.get('BOOTMAGIC_ENABLE') == 'lite':
+        rules['BOOTMAGIC_LITE_ENABLE'] = 'on'
+        del rules['BOOTMAGIC_ENABLE']
+    if rules.get('BOOTMAGIC_ENABLE') == 'full':
+        rules['BOOTMAGIC_ENABLE'] = 'on'
+
+    # Skip non-boolean features we haven't implemented special handling for
+    for feature in 'HAPTIC_ENABLE', 'QWIIC_ENABLE':
+        if rules.get(feature):
+            del rules[feature]
+
+    # Process the rest of the rules as booleans
+    for key, value in rules.items():
+        if key.endswith('_ENABLE'):
+            key = '_'.join(key.split('_')[:-1]).lower()
+            value = True if value.lower() in true_values else False if value.lower() in false_values else value
+
+            if 'config_h_features' not in info_data:
+                info_data['config_h_features'] = {}
+
+            if 'features' not in info_data:
+                info_data['features'] = {}
+
+            if key in info_data['features']:
+                _log_warning(info_data, 'Feature %s is specified in both info.json and rules.mk, the rules.mk value wins.' % (key,))
+
+            info_data['features'][key] = value
+            info_data['config_h_features'][key] = value
+
+    return info_data
+
+
+def _pin_name(pin):
+    """Returns the proper representation for a pin.
+    """
+    pin = pin.strip()
+
+    if not pin:
+        return None
+
+    elif pin.isdigit():
+        return int(pin)
+
+    elif pin == 'NO_PIN':
+        return None
+
+    elif pin[0] in 'ABCDEFGHIJK' and pin[1].isdigit():
+        return pin
+
+    raise ValueError(f'Invalid pin: {pin}')
+
+
+def _extract_pins(pins):
+    """Returns a list of pins from a comma separated string of pins.
+    """
+    return [_pin_name(pin) for pin in pins.split(',')]
+
+
+def _extract_direct_matrix(info_data, direct_pins):
+    """
+    """
+    info_data['matrix_pins'] = {}
+    direct_pin_array = []
+
+    while direct_pins[-1] != '}':
+        direct_pins = direct_pins[:-1]
+
+    for row in direct_pins.split('},{'):
+        if row.startswith('{'):
+            row = row[1:]
+
+        if row.endswith('}'):
+            row = row[:-1]
+
+        direct_pin_array.append([])
+
+        for pin in row.split(','):
+            if pin == 'NO_PIN':
+                pin = None
+
+            direct_pin_array[-1].append(pin)
+
+    return direct_pin_array
+
+
+def _extract_matrix_info(info_data, config_c):
+    """Populate the matrix information.
     """
-    config_c = config_h(info_data['keyboard_folder'])
     row_pins = config_c.get('MATRIX_ROW_PINS', '').replace('{', '').replace('}', '').strip()
     col_pins = config_c.get('MATRIX_COL_PINS', '').replace('{', '').replace('}', '').strip()
     direct_pins = config_c.get('DIRECT_PINS', '').replace(' ', '')[1:-1]
 
-    info_data['diode_direction'] = config_c.get('DIODE_DIRECTION')
-    info_data['matrix_size'] = {
-        'rows': compute(config_c.get('MATRIX_ROWS', '0')),
-        'cols': compute(config_c.get('MATRIX_COLS', '0')),
-    }
-    info_data['matrix_pins'] = {}
+    if 'MATRIX_ROWS' in config_c and 'MATRIX_COLS' in config_c:
+        if 'matrix_size' in info_data:
+            _log_warning(info_data, 'Matrix size is specified in both info.json and config.h, the config.h values win.')
+
+        info_data['matrix_size'] = {
+            'cols': compute(config_c.get('MATRIX_COLS', '0')),
+            'rows': compute(config_c.get('MATRIX_ROWS', '0')),
+        }
 
-    if row_pins:
-        info_data['matrix_pins']['rows'] = row_pins.split(',')
-    if col_pins:
-        info_data['matrix_pins']['cols'] = col_pins.split(',')
+    if row_pins and col_pins:
+        if 'matrix_pins' in info_data:
+            _log_warning(info_data, 'Matrix pins are specified in both info.json and config.h, the config.h values win.')
+
+        info_data['matrix_pins'] = {
+            'cols': _extract_pins(col_pins),
+            'rows': _extract_pins(row_pins),
+        }
 
     if direct_pins:
-        direct_pin_array = []
-        for row in direct_pins.split('},{'):
-            if row.startswith('{'):
-                row = row[1:]
-            if row.endswith('}'):
-                row = row[:-1]
+        if 'matrix_pins' in info_data:
+            _log_warning(info_data, 'Direct pins are specified in both info.json and config.h, the config.h values win.')
+
+        info_data['matrix_pins']['direct'] = _extract_direct_matrix(info_data, direct_pins)
+
+    return info_data
+
 
-            direct_pin_array.append([])
+def _extract_config_h(info_data):
+    """Pull some keyboard information from existing config.h files
+    """
+    config_c = config_h(info_data['keyboard_folder'])
 
-            for pin in row.split(','):
-                if pin == 'NO_PIN':
-                    pin = None
+    # Pull in data from the json map
+    dotty_info = dotty(info_data)
+    info_config_map = _json_load(Path('data/mappings/info_config.json'))
 
-                direct_pin_array[-1].append(pin)
+    for config_key, info_dict in info_config_map.items():
+        info_key = info_dict['info_key']
+        key_type = info_dict.get('value_type', 'str')
 
-        info_data['matrix_pins']['direct'] = direct_pin_array
+        try:
+            if config_key in config_c and info_dict.get('to_json', True):
+                if dotty_info.get(info_key) and info_dict.get('warn_duplicate', True):
+                    _log_warning(info_data, '%s in config.h is overwriting %s in info.json' % (config_key, info_key))
 
-    info_data['usb'] = {
-        'vid': config_c.get('VENDOR_ID'),
-        'pid': config_c.get('PRODUCT_ID'),
-        'device_ver': config_c.get('DEVICE_VER'),
-        'manufacturer': config_c.get('MANUFACTURER'),
-        'product': config_c.get('PRODUCT'),
-    }
+                if key_type.startswith('array'):
+                    if '.' in key_type:
+                        key_type, array_type = key_type.split('.', 1)
+                    else:
+                        array_type = None
+
+                    config_value = config_c[config_key].replace('{', '').replace('}', '').strip()
+
+                    if array_type == 'int':
+                        dotty_info[info_key] = list(map(int, config_value.split(',')))
+                    else:
+                        dotty_info[info_key] = config_value.split(',')
+
+                elif key_type == 'bool':
+                    dotty_info[info_key] = config_c[config_key] in true_values
+
+                elif key_type == 'hex':
+                    dotty_info[info_key] = '0x' + config_c[config_key][2:].upper()
+
+                elif key_type == 'list':
+                    dotty_info[info_key] = config_c[config_key].split()
+
+                elif key_type == 'int':
+                    dotty_info[info_key] = int(config_c[config_key])
+
+                else:
+                    dotty_info[info_key] = config_c[config_key]
+
+        except Exception as e:
+            _log_warning(info_data, f'{config_key}->{info_key}: {e}')
+
+    info_data.update(dotty_info)
+
+    # Pull data that easily can't be mapped in json
+    _extract_matrix_info(info_data, config_c)
 
     return info_data
 
@@ -103,63 +321,143 @@ def _extract_rules_mk(info_data):
     """Pull some keyboard information from existing rules.mk files
     """
     rules = rules_mk(info_data['keyboard_folder'])
-    mcu = rules.get('MCU')
+    info_data['processor'] = rules.get('MCU', info_data.get('processor', 'atmega32u4'))
+
+    if info_data['processor'] in CHIBIOS_PROCESSORS:
+        arm_processor_rules(info_data, rules)
+
+    elif info_data['processor'] in LUFA_PROCESSORS + VUSB_PROCESSORS:
+        avr_processor_rules(info_data, rules)
+
+    else:
+        cli.log.warning("%s: Unknown MCU: %s" % (info_data['keyboard_folder'], info_data['processor']))
+        unknown_processor_rules(info_data, rules)
+
+    # Pull in data from the json map
+    dotty_info = dotty(info_data)
+    info_rules_map = _json_load(Path('data/mappings/info_rules.json'))
+
+    for rules_key, info_dict in info_rules_map.items():
+        info_key = info_dict['info_key']
+        key_type = info_dict.get('value_type', 'str')
+
+        try:
+            if rules_key in rules and info_dict.get('to_json', True):
+                if dotty_info.get(info_key) and info_dict.get('warn_duplicate', True):
+                    _log_warning(info_data, '%s in rules.mk is overwriting %s in info.json' % (rules_key, info_key))
+
+                if key_type.startswith('array'):
+                    if '.' in key_type:
+                        key_type, array_type = key_type.split('.', 1)
+                    else:
+                        array_type = None
+
+                    rules_value = rules[rules_key].replace('{', '').replace('}', '').strip()
+
+                    if array_type == 'int':
+                        dotty_info[info_key] = list(map(int, rules_value.split(',')))
+                    else:
+                        dotty_info[info_key] = rules_value.split(',')
+
+                elif key_type == 'list':
+                    dotty_info[info_key] = rules[rules_key].split()
+
+                elif key_type == 'bool':
+                    dotty_info[info_key] = rules[rules_key] in true_values
 
-    if mcu in CHIBIOS_PROCESSORS:
-        return arm_processor_rules(info_data, rules)
+                elif key_type == 'hex':
+                    dotty_info[info_key] = '0x' + rules[rules_key][2:].upper()
 
-    elif mcu in LUFA_PROCESSORS + VUSB_PROCESSORS:
-        return avr_processor_rules(info_data, rules)
+                elif key_type == 'int':
+                    dotty_info[info_key] = int(rules[rules_key])
 
-    msg = "Unknown MCU: " + str(mcu)
+                else:
+                    dotty_info[info_key] = rules[rules_key]
 
-    _log_warning(info_data, msg)
+        except Exception as e:
+            _log_warning(info_data, f'{rules_key}->{info_key}: {e}')
+
+    info_data.update(dotty_info)
+
+    # Merge in config values that can't be easily mapped
+    _extract_features(info_data, rules)
+
+    return info_data
 
-    return unknown_processor_rules(info_data, rules)
+
+def _merge_layouts(info_data, new_info_data):
+    """Merge new_info_data into info_data in an intelligent way.
+    """
+    for layout_name, layout_json in new_info_data['layouts'].items():
+        if layout_name in info_data['layouts']:
+            # Pull in layouts we have a macro for
+            if len(info_data['layouts'][layout_name]['layout']) != len(layout_json['layout']):
+                msg = '%s: %s: Number of elements in info.json does not match! info.json:%s != %s:%s'
+                _log_error(info_data, msg % (info_data['keyboard_folder'], layout_name, len(layout_json['layout']), layout_name, len(info_data['layouts'][layout_name]['layout'])))
+            else:
+                for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
+                    key.update(layout_json['layout'][i])
+        else:
+            # Pull in layouts that have matrix data
+            missing_matrix = False
+            for key in layout_json.get('layout', {}):
+                if 'matrix' not in key:
+                    missing_matrix = True
+
+            if not missing_matrix:
+                if layout_name in info_data['layouts']:
+                    # Update an existing layout with new data
+                    for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
+                        key.update(layout_json['layout'][i])
+
+                else:
+                    # Copy in the new layout wholesale
+                    layout_json['c_macro'] = False
+                    info_data['layouts'][layout_name] = layout_json
+
+    return info_data
 
 
 def _search_keyboard_h(path):
     current_path = Path('keyboards/')
+    aliases = {}
     layouts = {}
+
     for directory in path.parts:
         current_path = current_path / directory
         keyboard_h = '%s.h' % (directory,)
         keyboard_h_path = current_path / keyboard_h
         if keyboard_h_path.exists():
-            layouts.update(find_layouts(keyboard_h_path))
+            new_layouts, new_aliases = find_layouts(keyboard_h_path)
+            layouts.update(new_layouts)
+
+            for alias, alias_text in new_aliases.items():
+                if alias_text in layouts:
+                    aliases[alias] = alias_text
 
-    return layouts
+    return layouts, aliases
 
 
-def _find_all_layouts(info_data, keyboard, rules):
+def _find_all_layouts(info_data, keyboard):
     """Looks for layout macros associated with this keyboard.
     """
-    layouts = _search_keyboard_h(Path(keyboard))
+    layouts, aliases = _search_keyboard_h(Path(keyboard))
 
     if not layouts:
-        # If we didn't find any layouts above we widen our search. This is error
-        # prone which is why we want to encourage people to follow the standard above.
-        _log_warning(info_data, 'Falling back to searching for KEYMAP/LAYOUT macros.')
+        # If we don't find any layouts from info.json or keyboard.h we widen our search. This is error prone which is why we want to encourage people to follow the standard above.
+        info_data['parse_warnings'].append('%s: Falling back to searching for KEYMAP/LAYOUT macros.' % (keyboard))
+
         for file in glob('keyboards/%s/*.h' % keyboard):
             if file.endswith('.h'):
-                these_layouts = find_layouts(file)
+                these_layouts, these_aliases = find_layouts(file)
+
                 if these_layouts:
                     layouts.update(these_layouts)
 
-    if 'LAYOUTS' in rules:
-        # Match these up against the supplied layouts
-        supported_layouts = rules['LAYOUTS'].strip().split()
-        for layout_name in sorted(layouts):
-            if not layout_name.startswith('LAYOUT_'):
-                continue
-            layout_name = layout_name[7:]
-            if layout_name in supported_layouts:
-                supported_layouts.remove(layout_name)
-
-        if supported_layouts:
-            _log_error(info_data, 'Missing LAYOUT() macro for %s' % (', '.join(supported_layouts)))
+                if these_aliases:
+                    aliases.update(these_aliases)
 
-    return layouts
+    return layouts, aliases
 
 
 def _log_error(info_data, message):
@@ -180,13 +478,13 @@ def arm_processor_rules(info_data, rules):
     """Setup the default info for an ARM board.
     """
     info_data['processor_type'] = 'arm'
-    info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'unknown'
-    info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
     info_data['protocol'] = 'ChibiOS'
 
-    if info_data['bootloader'] == 'unknown':
+    if 'bootloader' not in info_data:
         if 'STM32' in info_data['processor']:
             info_data['bootloader'] = 'stm32-dfu'
+        else:
+            info_data['bootloader'] = 'unknown'
 
     if 'STM32' in info_data['processor']:
         info_data['platform'] = 'STM32'
@@ -202,11 +500,12 @@ def avr_processor_rules(info_data, rules):
     """Setup the default info for an AVR board.
     """
     info_data['processor_type'] = 'avr'
-    info_data['bootloader'] = rules['BOOTLOADER'] if 'BOOTLOADER' in rules else 'atmel-dfu'
    info_data['platform'] = rules['ARCH'] if 'ARCH' in rules else 'unknown'
-    info_data['processor'] = rules['MCU'] if 'MCU' in rules else 'unknown'
     info_data['protocol'] = 'V-USB' if rules.get('MCU') in VUSB_PROCESSORS else 'LUFA'
 
+    if 'bootloader' not in info_data:
+        info_data['bootloader'] = 'atmel-dfu'
+
     # FIXME(fauxpark/anyone): Eventually we should detect the protocol by looking at PROTOCOL inherited from mcu_selection.mk:
     # info_data['protocol'] = 'V-USB' if rules.get('PROTOCOL') == 'VUSB' else 'LUFA'
 
@@ -225,38 +524,52 @@ def unknown_processor_rules(info_data, rules):
     return info_data
 
 
+def deep_update(origdict, newdict):
+    """Update a dictionary in place, recursing to do a deep copy.
+    """
+    for key, value in newdict.items():
+        if isinstance(value, Mapping):
+            origdict[key] = deep_update(origdict.get(key, {}), value)
+
+        else:
+            origdict[key] = value
+
+    return origdict
+
+
 def merge_info_jsons(keyboard, info_data):
     """Return a merged copy of all the info.json files for a keyboard.
     """
     for info_file in find_info_json(keyboard):
         # Load and validate the JSON data
-        try:
-            with info_file.open('r') as info_fd:
-                new_info_data = json.load(info_fd)
-        except Exception as e:
-            _log_error(info_data, "Invalid JSON in file %s: %s: %s" % (str(info_file), e.__class__.__name__, e))
-            continue
+        new_info_data = _json_load(info_file)
 
         if not isinstance(new_info_data, dict):
            _log_error(info_data, "Invalid file %s, root object should be a dictionary." % (str(info_file),))
             continue
 
-        # Copy whitelisted keys into `info_data`
-        for key in ('keyboard_name', 'manufacturer', 'identifier', 'url', 'maintainer', 'processor', 'bootloader', 'width', 'height'):
-            if key in new_info_data:
-                info_data[key] = new_info_data[key]
+        try:
+            keyboard_validate(new_info_data)
+        except jsonschema.ValidationError as e:
+            json_path = '.'.join([str(p) for p in e.absolute_path])
+            cli.log.error('Not including data from file: %s', info_file)
+            cli.log.error('\t%s: %s', json_path, e.message)
+            continue
 
-        # Merge the layouts in
+        # Merge layout data in
+        for layout_name, layout in new_info_data.get('layouts', {}).items():
+            if layout_name in info_data['layouts']:
+                for new_key, existing_key in zip(layout['layout'], info_data['layouts'][layout_name]['layout']):
+                    existing_key.update(new_key)
+            else:
+                layout['c_macro'] = False
+                info_data['layouts'][layout_name] = layout
+
+        # Update info_data with the new data
         if 'layouts' in new_info_data:
-            for layout_name, json_layout in new_info_data['layouts'].items():
-                # Only pull in layouts we have a macro for
-                if layout_name in info_data['layouts']:
-                    if info_data['layouts'][layout_name]['key_count'] != len(json_layout['layout']):
-                        msg = '%s: Number of elements in info.json does not match! info.json:%s != %s:%s'
-                        _log_error(info_data, msg % (layout_name, len(json_layout['layout']), layout_name, len(info_data['layouts'][layout_name]['layout'])))
-                    else:
-                        for i, key in enumerate(info_data['layouts'][layout_name]['layout']):
-                            key.update(json_layout['layout'][i])
+            del (new_info_data['layouts'])
+
+        deep_update(info_data, new_info_data)
 
     return info_data
 
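
For reference, a minimal sketch of how the new deep_update() helper merges nested info.json data; the dictionaries below are made-up examples, not values taken from any real keyboard:

    from collections.abc import Mapping


    def deep_update(origdict, newdict):
        """Update a dictionary in place, recursing to do a deep copy."""
        for key, value in newdict.items():
            if isinstance(value, Mapping):
                # Recurse into nested mappings instead of overwriting them wholesale
                origdict[key] = deep_update(origdict.get(key, {}), value)
            else:
                origdict[key] = value

        return origdict


    info = {'usb': {'vid': '0xFEED', 'pid': '0x0000'}, 'features': {'rgblight': False}}
    overlay = {'usb': {'pid': '0x6060'}, 'features': {'extrakey': True}}
    deep_update(info, overlay)

    # Nested keys are merged rather than replaced:
    # info == {'usb': {'vid': '0xFEED', 'pid': '0x6060'},
    #          'features': {'rgblight': False, 'extrakey': True}}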
