Update extract configs (#1845)

* Update config-extraction scripts to work with multiple chips

* Standardise wording between common options

* Various config-related fixes

* Update pico_rand to use correct busctrl_hw struct-name

* Don't start config descriptions with "The ..."
Andrew Scheller authored 2024-08-27 17:11:42 +01:00, committed by GitHub
parent a700b7784b
commit d639292fea
20 changed files with 454 additions and 217 deletions
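
The central change across all three extraction scripts is that configs are now bucketed per chip, and the generated TSVs gain 'platform' and 'chip' columns. As a rough sketch of the new output shape (standalone Python; the two rows, their names and locations are invented purely for illustration, and only a few attribute columns are shown):

import csv

# Sketch of the new column layout: 'platform' and 'chip' sit after 'location',
# and the remaining per-config attributes follow 'type' in sorted order.
fieldnames = ('name', 'location', 'platform', 'chip', 'description', 'type',
              'advanced', 'default', 'group')

# Hypothetical rows showing how the new columns are filled in: chip-specific
# configs get platform=rp2 plus their chip name, host-only configs get
# platform=host, and everything else is platform=rp2, chip=all.
rows = [
    {'name': 'PICO_EXAMPLE_CHIP_OPT', 'location': '/rp2350/example.h:1',
     'platform': 'rp2', 'chip': 'rp2350',
     'description': 'Example chip-specific option', 'type': 'bool',
     'default': '1', 'group': 'example'},
    {'name': 'PICO_EXAMPLE_COMMON_OPT', 'location': '/rp2_common/example.h:2',
     'platform': 'rp2', 'chip': 'all',
     'description': 'Example common option', 'type': 'int',
     'default': '0', 'group': 'example'},
]

with open('pico_configs.tsv', 'w', newline='') as csvfile:
    writer = csv.DictWriter(csvfile, fieldnames=fieldnames,
                            extrasaction='ignore', dialect='excel-tab')
    writer.writeheader()
    writer.writerows(rows)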

View File

@ -3,7 +3,7 @@ include(${CMAKE_CURRENT_LIST_DIR}/find_compiler.cmake)
# include our Platform/PICO.cmake
set(CMAKE_SYSTEM_NAME PICO)
# PICO_CMAKE_CONFIG: PICO_GCC_TRIPLE, List of GCC_TRIPLES -- usually only one -- to try when searching for a compiler. This may be specified the user environment, type=int, default=PICO_DEFAULT_GCC_TRIPLE which is set based on PICO_COMPILER, group=pico_base, doxref=cmake-toolchain-config
# PICO_CMAKE_CONFIG: PICO_GCC_TRIPLE, List of GCC_TRIPLES -- usually only one -- to try when searching for a compiler. This may be specified the user environment, type=int, default=PICO_DEFAULT_GCC_TRIPLE which is set based on PICO_COMPILER, group=pico_base, docref=cmake-toolchain-config
if (NOT PICO_GCC_TRIPLE)
if (DEFINED ENV{_SAVED_PICO_GCC_TRIPLE})
# saved within the same cmake invocation

View File

@ -63,7 +63,7 @@ function(pico_add_extra_outputs TARGET)
pico_add_map_output(${TARGET})
# PICO_CMAKE_CONFIG: PICO_NO_TARGET_NAME, Don't define PICO_TARGET_NAME, type=bool, default=0, group=build
# PICO_BUILD_DEFINE: PICO_TARGET_NAME, The name of the build target being compiled (unless PICO_NO_TARGET_NAME set in build), type=string, default=target name, group=build
# PICO_BUILD_DEFINE: PICO_TARGET_NAME, Name of the build target being compiled (unless PICO_NO_TARGET_NAME set in build), type=string, default=target name, group=build
if (NOT PICO_NO_TARGET_NAME)
target_compile_definitions(${TARGET} PRIVATE
PICO_TARGET_NAME="${TARGET}"
@ -94,4 +94,4 @@ set(PICO_NO_HARDWARE "0" CACHE INTERNAL "")
set(PICO_ON_DEVICE "1" CACHE INTERNAL "")
set(CMAKE_EXECUTABLE_SUFFIX .elf)
set(CMAKE_EXECUTABLE_SUFFIX "${CMAKE_EXECUTABLE_SUFFIX}" PARENT_SCOPE)
set(CMAKE_EXECUTABLE_SUFFIX "${CMAKE_EXECUTABLE_SUFFIX}" PARENT_SCOPE)

View File

@ -18,7 +18,7 @@
#define PICO_DISABLE_SHARED_IRQ_HANDLERS 0
#endif
// PICO_CONFIG: PICO_VTABLE_PER_CORE, user is using separate vector tables per core, type=bool, default=0, group=hardware_irq
// PICO_CONFIG: PICO_VTABLE_PER_CORE, User is using separate vector tables per core, type=bool, default=0, group=hardware_irq
#ifndef PICO_VTABLE_PER_CORE
#define PICO_VTABLE_PER_CORE 0
#endif

View File

@ -26,52 +26,52 @@ static inline void atomic_thread_fence(uint x) {}
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_ATOMIC, Spinlock ID for atomic protection, min=0, max=31, default=8, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_ATOMIC, Spinlock ID for atomics, min=0, max=31, default=8, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_ATOMIC
#define PICO_SPINLOCK_ID_ATOMIC 8
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_IRQ, Spinlock ID for IRQ protection, min=0, max=31, default=9, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_IRQ, Spinlock ID for IRQ protection, min=0, max=31, default=9, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_IRQ
#define PICO_SPINLOCK_ID_IRQ 9
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_TIMER, Spinlock ID for Timer protection, min=0, max=31, default=10, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_TIMER, Spinlock ID for Timer protection, min=0, max=31, default=10, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_TIMER
#define PICO_SPINLOCK_ID_TIMER 10
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_HARDWARE_CLAIM, Spinlock ID for Hardware claim protection, min=0, max=31, default=11, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_HARDWARE_CLAIM, Spinlock ID for Hardware claim protection, min=0, max=31, default=11, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_HARDWARE_CLAIM
#define PICO_SPINLOCK_ID_HARDWARE_CLAIM 11
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_RAND, Spinlock ID for Random Number Generator, min=0, max=31, default=12, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_RAND, Spinlock ID for Random Number Generator, min=0, max=31, default=12, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_RAND
#define PICO_SPINLOCK_ID_RAND 12
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_OS1, First Spinlock ID reserved for use by low level OS style software, min=0, max=31, default=14, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_OS1, First Spinlock ID reserved for use by low level OS style software, min=0, max=31, default=14, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_OS1
#define PICO_SPINLOCK_ID_OS1 14
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_OS2, Second Spinlock ID reserved for use by low level OS style software, min=0, max=31, default=15, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_OS2, Second Spinlock ID reserved for use by low level OS style software, min=0, max=31, default=15, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_OS2
#define PICO_SPINLOCK_ID_OS2 15
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_STRIPED_FIRST, Lowest Spinlock ID in the 'striped' range, min=0, max=31, default=16, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_STRIPED_FIRST, Lowest Spinlock ID in the 'striped' range, min=0, max=31, default=16, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_STRIPED_FIRST
#define PICO_SPINLOCK_ID_STRIPED_FIRST 16
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_STRIPED_LAST, Highest Spinlock ID in the 'striped' range, min=0, max=31, default=23, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_STRIPED_LAST, Highest Spinlock ID in the 'striped' range, min=0, max=31, default=23, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_STRIPED_LAST
#define PICO_SPINLOCK_ID_STRIPED_LAST 23
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_CLAIM_FREE_FIRST, Lowest Spinlock ID in the 'claim free' range, min=0, max=31, default=24, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_CLAIM_FREE_FIRST, Lowest Spinlock ID in the 'claim free' range, min=0, max=31, default=24, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_CLAIM_FREE_FIRST
#define PICO_SPINLOCK_ID_CLAIM_FREE_FIRST 24
#endif
@ -80,7 +80,7 @@ static inline void atomic_thread_fence(uint x) {}
#warning PICO_SPINLOCK_ID_CLAIM_FREE_END has been renamed to PICO_SPINLOCK_ID_CLAIM_FREE_LAST
#endif
/// PICO_CONFIG: PICO_SPINLOCK_ID_CLAIM_FREE_LAST, Highest Spinlock ID in the 'claim free' range, min=0, max=31, default=31, group=hardware_sync
// PICO_CONFIG: PICO_SPINLOCK_ID_CLAIM_FREE_LAST, Highest Spinlock ID in the 'claim free' range, min=0, max=31, default=31, group=hardware_sync
#ifndef PICO_SPINLOCK_ID_CLAIM_FREE_LAST
#define PICO_SPINLOCK_ID_CLAIM_FREE_LAST 31
#endif

View File

@ -11,7 +11,7 @@
#include "pico.h"
// PICO_CONFIG: PICO_BUILD_BOOT_STAGE2_NAME, The name of the boot stage 2 if selected by the build, group=boot_stage2
// PICO_CONFIG: PICO_BUILD_BOOT_STAGE2_NAME, Name of the boot stage 2 if selected in the build system, group=boot_stage2
#ifdef PICO_BUILD_BOOT_STAGE2_NAME
#define _BOOT_STAGE2_SELECTED
#else

View File

@ -66,11 +66,6 @@
#define PICO_RP2040_B2_SUPPORTED 1
#endif
// PICO_CONFIG: PICO_RP2350_A2_SUPPORTED, Whether to include any specific software support for RP2350 A2 revision, type=bool, default=1, advanced=true, group=pico_platform
#ifndef PICO_RP2350_A2_SUPPORTED
#define PICO_RP2350_A2_SUPPORTED 1
#endif
#ifndef PICO_RAM_VECTOR_TABLE_SIZE
#define PICO_RAM_VECTOR_TABLE_SIZE (VTABLE_FIRST_IRQ + NUM_IRQS)
#endif
@ -212,4 +207,4 @@ return a;
#endif // __ASSEMBLER__
#endif
#endif

View File

@ -74,7 +74,7 @@
#define HAS_RP2350_TRNG 1
#define HAS_HSTX 1
// PICO_CONFIG: XOSC_HZ, The crystal oscillator frequency in Hz, type=int, default=12000000, advanced=true, group=hardware_base
// PICO_CONFIG: XOSC_HZ, Crystal oscillator frequency in Hz, type=int, default=12000000, advanced=true, group=hardware_base
// NOTE: The system and USB clocks are generated from the frequency using two PLLs.
// If you override this define, or SYS_CLK_HZ/USB_CLK_HZ below, you will *also* need to add your own adjusted PLL set-up defines to
// override the defaults which live in src/rp2_common/hardware_clocks/include/hardware/clocks.h
@ -89,7 +89,7 @@
#endif
#endif
// PICO_CONFIG: SYS_CLK_HZ, The system operating frequency in Hz, type=int, default=150000000, advanced=true, group=hardware_base
// PICO_CONFIG: SYS_CLK_HZ, System operating frequency in Hz, type=int, default=150000000, advanced=true, group=hardware_base
#ifndef SYS_CLK_HZ
#ifdef SYS_CLK_KHZ
#define SYS_CLK_HZ ((SYS_CLK_KHZ) * _u(1000))

View File

@ -30,6 +30,14 @@
#endif
// PICO_CONFIG: PICO_RP2350A, Whether the current board has an RP2350 in an A (30 GPIO) package, type=bool, default=Usually provided via board header, group=pico_platform
#if 0 // make tooling checks happy
#define PICO_RP2350A 0
#endif
// PICO_CONFIG: PICO_RP2350_A2_SUPPORTED, Whether to include any specific software support for RP2350 A2 revision, type=bool, default=1, advanced=true, group=pico_platform
#ifndef PICO_RP2350_A2_SUPPORTED
#define PICO_RP2350_A2_SUPPORTED 1
#endif
// PICO_CONFIG: PICO_STACK_SIZE, Minimum amount of stack space reserved in the linker script for each core. See also PICO_CORE1_STACK_SIZE, min=0x100, default=0x800, advanced=true, group=pico_platform
#ifndef PICO_STACK_SIZE
@ -99,12 +107,10 @@ static inline void busy_wait_at_least_cycles(uint32_t minimum_cycles) {
);
}
// PICO_CONFIG: PICO_NO_FPGA_CHECK, Remove the FPGA platform check for small code size reduction, type=bool, default=platform dependent, advanced=true, group=pico_runtime
// PICO_CONFIG: PICO_NO_FPGA_CHECK, Remove the FPGA platform check for small code size reduction, type=bool, default=1, advanced=true, group=pico_runtime
#ifndef PICO_NO_FPGA_CHECK
#if !PICO_RP2040
#define PICO_NO_FPGA_CHECK 1
#endif
#endif
// PICO_CONFIG: PICO_NO_SIM_CHECK, Remove the SIM platform check for small code size reduction, type=bool, default=1, advanced=true, group=pico_runtime
#ifndef PICO_NO_SIM_CHECK

View File

@ -1,7 +1,7 @@
# Deferring this config until we decide how to include other CMSIS libraries... it is likely that we always want to use the stub version of the core
# at least if the vendor/device is RaspberryPi/RP2040...
## PICO_CMAKE_CONFIG: PICO_CMSIS_PATH, directory to locate CMSIS installation, default="included stub CORE only impl", group=build
## PICO_CMAKE_CONFIG: PICO_CMSIS_PATH, Directory to locate CMSIS installation, type=string, default="included stub CORE only impl", group=build
#if (DEFINED PICO_CMSIS_PATH)
# set(PICO_CMSIS_PATH "${PICO_CMSIS_PATH}" CACHE PATH "Path to the CMSIS tree to use with Raspberry Pi Pico SDK")
# message("Using specified PICO_CMSIS_PATH for CMSIS ('${PICO_CMSIS_PATH}')")
@ -10,7 +10,7 @@
# message("Using PICO_CMSIS_PATH from environment for CMSIS ('${PICO_CMSIS_PATH}')")
#endif()
#
## PICO_CMAKE_CONFIG: PICO_CMSIS_VENDOR, vendor name for CMSIS, default="RaspberryPi", group=build
## PICO_CMAKE_CONFIG: PICO_CMSIS_VENDOR, Vendor name for CMSIS, type=string, default="RaspberryPi", group=build
#if (DEFINED PICO_CMSIS_VENDOR)
# set(PICO_CMSIS_VENDOR "${PICO_CMSIS_VENDOR}" CACHE STRING "CMSIS vendor name to use")
# message("Using specified PICO_CMSIS_VENDOR for CMSIS ('${PICO_CMSIS_VENDOR}')")
@ -21,7 +21,7 @@
# set(PICO_CMSIS_VENDOR RaspberryPi)
#endif()
#
## PICO_CMAKE_CONFIG: PICO_CMSIS_DEVICE, device name for CMSIS, default="RP2040", group=build
## PICO_CMAKE_CONFIG: PICO_CMSIS_DEVICE, Device name for CMSIS, type=string, default="RP2040", group=build
#if (DEFINED PICO_CMSIS_DEVICE)
# set(PICO_CMSIS_DEVICE "${PICO_CMSIS_DEVICE}" CACHE STRING "CMSIS device name to use")
# message("Using specified PICO_CMSIS_DEVICE for CMSIS ('${PICO_CMSIS_DEVICE}')")

View File

@ -192,7 +192,7 @@ extern "C" {
#ifndef PLL_SYS_VCO_FREQ_HZ
#define PLL_SYS_VCO_FREQ_HZ (1500 * MHZ)
#endif
// PICO_CONFIG: PLL_SYS_POSTDIV1, System clock PLL post divider 1 setting, type=int, default=6 on RP2040 5 or on RP2350, advanced=true, group=hardware_clocks
// PICO_CONFIG: PLL_SYS_POSTDIV1, System clock PLL post divider 1 setting, type=int, default=6 on RP2040 or 5 on RP2350, advanced=true, group=hardware_clocks
#ifndef PLL_SYS_POSTDIV1
#if SYS_CLK_HZ == 125 * MHZ
#define PLL_SYS_POSTDIV1 6

View File

@ -18,7 +18,7 @@
#define PICO_DISABLE_SHARED_IRQ_HANDLERS 0
#endif
// PICO_CONFIG: PICO_VTABLE_PER_CORE, user is using separate vector tables per core, type=bool, default=0, group=hardware_irq
// PICO_CONFIG: PICO_VTABLE_PER_CORE, User is using separate vector tables per core, type=bool, default=0, group=hardware_irq
#ifndef PICO_VTABLE_PER_CORE
#define PICO_VTABLE_PER_CORE 0
#endif

View File

@ -23,7 +23,7 @@
#endif
#endif
// PICO_CONFIG: PICO_PIO_VERSION, The PIO hardware version, type=int, default=0 on RP2040 and 1 on RP2350, group=hardware_pio
// PICO_CONFIG: PICO_PIO_VERSION, PIO hardware version, type=int, default=0 on RP2040 and 1 on RP2350, group=hardware_pio
#ifndef PICO_PIO_VERSION
#if PIO_GPIOBASE_BITS
#define PICO_PIO_VERSION 1

View File

@ -2,7 +2,7 @@ pico_simple_hardware_target(uart)
pico_mirrored_target_link_libraries(hardware_uart INTERFACE hardware_resets hardware_clocks)
# PICO_CONFIG: PICO_DEFAULT_UART_BAUD_RATE, Define the default UART baudrate, max=921600, default=115200, group=hardware_uart
# PICO_CMAKE_CONFIG: PICO_DEFAULT_UART_BAUD_RATE, Define the default UART baudrate, type=int, max=921600, default=115200, group=hardware_uart
if (PICO_DEFAULT_UART_BAUD_RATE)
target_compile_definitions(hardware_uart INTERFACE
PICO_DEFAULT_UART_BAUD_RATE=${PICO_DEFAULT_UART_BAUD_RATE})

View File

@ -10,7 +10,7 @@
#include "hardware/boot_lock.h"
#include "pico/bootrom_constants.h"
// PICO_CONFIG: PICO_BOOTROM_LOCKING_ENABLED, Enable/disable locking for bootrom functions that use shared reqsources. If this flag is enabled bootrom lock checking is turned on and BOOT locks are taken around the relevant bootrom functions, type=bool, default=1, group=pico_bootrom
// PICO_CONFIG: PICO_BOOTROM_LOCKING_ENABLED, Enable/disable locking for bootrom functions that use shared resources. If this flag is enabled bootrom lock checking is turned on and BOOT locks are taken around the relevant bootrom functions, type=bool, default=1, group=pico_bootrom
#ifndef PICO_BOOTROM_LOCKING_ENABLED
#if NUM_BOOT_LOCKS > 0
#define PICO_BOOTROM_LOCKING_ENABLED 1

View File

@ -70,7 +70,7 @@ extern "C" {
#endif
#endif
// PICO_CONFIG: PICO_RAND_ENTROPY_SRC_TRNG, Enable/disable use of hardware TRNG as an entropy source, type=bool, default=1 if no hardware TRNG, group=pico_rand
// PICO_CONFIG: PICO_RAND_ENTROPY_SRC_TRNG, Enable/disable use of hardware TRNG as an entropy source, type=bool, default=1 if hardware TRNG, group=pico_rand
#ifndef PICO_RAND_ENTROPY_SRC_TRNG
#if HAS_RP2350_TRNG
#define PICO_RAND_ENTROPY_SRC_TRNG 1
@ -108,9 +108,9 @@ extern "C" {
#define PICO_RAND_SEED_ENTROPY_SRC_TIME PICO_RAND_ENTROPY_SRC_TIME
#endif
// PICO_CONFIG: PICO_RAND_SEED_ENTROPY_SRC_BUF_PERF_COUNTER, Enable/disable use of a bus performance counter as an entropy source for the random seed, type=bool, default=PICO_RAND_ENTROPY_SRC_BUS_PERF_COUNTER, group=pico_rand
#ifndef PICO_RAND_SEED_ENTROPY_SRC_BUF_PERF_COUNTER
#define PICO_RAND_SEED_ENTROPY_SRC_BUF_PERF_COUNTER PICO_RAND_ENTROPY_SRC_BUS_PERF_COUNTER
// PICO_CONFIG: PICO_RAND_SEED_ENTROPY_SRC_BUS_PERF_COUNTER, Enable/disable use of a bus performance counter as an entropy source for the random seed, type=bool, default=PICO_RAND_ENTROPY_SRC_BUS_PERF_COUNTER, group=pico_rand
#ifndef PICO_RAND_SEED_ENTROPY_SRC_BUS_PERF_COUNTER
#define PICO_RAND_SEED_ENTROPY_SRC_BUS_PERF_COUNTER PICO_RAND_ENTROPY_SRC_BUS_PERF_COUNTER
#endif
// PICO_CONFIG: PICO_RAND_SEED_ENTROPY_SRC_BOOT_RANDOM, Enable/disable use of the per boot random number as an entropy source for the random seed, type=bool, default=0 on RP2040 which has none, group=pico_rand

View File

@ -297,8 +297,8 @@ static void initialise_rand(void) {
#if PICO_RAND_SEED_ENTROPY_SRC_BUS_PERF_COUNTER
#if !PICO_RAND_BUS_PERF_COUNTER_INDEX
int idx = -1;
for(uint i = 0; i < count_of(bus_ctrl_hw->counter); i++) {
if (bus_ctrl_hw->counter[i].sel == BUSCTRL_PERFSEL0_RESET) {
for(uint i = 0; i < count_of(busctrl_hw->counter); i++) {
if (busctrl_hw->counter[i].sel == BUSCTRL_PERFSEL0_RESET) {
idx = (int)i;
break;
}
@ -308,7 +308,7 @@ static void initialise_rand(void) {
#else
bus_counter_idx = (uint8_t)PICO_RAND_BUS_PERF_COUNTER_INDEX;
#endif
bus_ctrl_hw->counter[bus_counter_idx].sel = PICO_RAND_BUS_PERF_COUNTER_EVENT;
busctrl_hw->counter[bus_counter_idx].sel = PICO_RAND_BUS_PERF_COUNTER_EVENT;
#endif
(void) xoroshiro128ss(&local_rng_state);
rng_state = local_rng_state;

View File

@ -51,7 +51,7 @@
#endif
#endif
// PICO_CONFIG: PICO_STDIO_USB_RESET_MAGIC_BAUD_RATE, baud rate that if selected causes a reset into BOOTSEL mode (if PICO_STDIO_USB_ENABLE_RESET_VIA_BAUD_RATE is set), default=1200, group=pico_stdio_usb
// PICO_CONFIG: PICO_STDIO_USB_RESET_MAGIC_BAUD_RATE, Baud rate that if selected causes a reset into BOOTSEL mode (if PICO_STDIO_USB_ENABLE_RESET_VIA_BAUD_RATE is set), default=1200, group=pico_stdio_usb
#ifndef PICO_STDIO_USB_RESET_MAGIC_BAUD_RATE
#define PICO_STDIO_USB_RESET_MAGIC_BAUD_RATE 1200
#endif

View File

@ -7,7 +7,7 @@
#
# Script to scan the Raspberry Pi Pico SDK tree searching for CMake build defines
# Outputs a tab separated file of the configuration item:
# name location description type default group
# name location platform chip description type default group
#
# Usage:
#
@ -22,17 +22,28 @@ import re
import csv
import logging
from collections import defaultdict
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
scandir = sys.argv[1]
outfile = sys.argv[2] if len(sys.argv) > 2 else 'pico_build_defines.tsv'
BUILD_DEFINE_RE = re.compile(r'#\s+PICO_BUILD_DEFINE:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$')
BASE_CONFIG_NAME = 'PICO_CONFIG'
BASE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CONFIG_NAME))
BASE_CMAKE_CONFIG_NAME = 'PICO_CMAKE_CONFIG'
BASE_CMAKE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CMAKE_CONFIG_NAME))
BASE_BUILD_DEFINE_NAME = 'PICO_BUILD_DEFINE'
BASE_BUILD_DEFINE_RE = re.compile(r'\b{}\b'.format(BASE_BUILD_DEFINE_NAME))
all_configs = {}
BUILD_DEFINE_RE = re.compile(r'#\s+{}:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$'.format(BASE_BUILD_DEFINE_NAME))
CHIP_NAMES = ["rp2040", "rp2350"]
chips_all_configs = defaultdict(dict)
all_attrs = set()
all_descriptions = {}
chips_all_descriptions = defaultdict(dict)
@ -94,7 +105,7 @@ def ValidateAttrs(config_attrs, file_path, linenum):
assert 'max' not in config_attrs
_default = config_attrs.get('default', None)
else:
raise Exception("Found unknown PICO_BUILD_DEFINE type {} at {}:{}".format(_type, file_path, linenum))
raise Exception("Found unknown {} type {} at {}:{}".format(BASE_BUILD_DEFINE_NAME, _type, file_path, linenum))
@ -106,63 +117,133 @@ for dirpath, dirnames, filenames in os.walk(scandir):
file_ext = os.path.splitext(filename)[1]
if filename == 'CMakeLists.txt' or file_ext == '.cmake':
file_path = os.path.join(dirpath, filename)
applicable = "all"
for chip in (*CHIP_NAMES, "host"):
if "/{}/".format(chip) in dirpath:
applicable = chip
break
with open(file_path, encoding="ISO-8859-1") as fh:
linenum = 0
for line in fh.readlines():
linenum += 1
line = line.strip()
m = BUILD_DEFINE_RE.match(line)
if m:
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
if config_description in all_descriptions:
raise Exception("Found description {} at {}:{} but it was already used at {}:{}".format(config_description, file_path, linenum, os.path.join(scandir, all_descriptions[config_description]['filename']), all_descriptions[config_description]['line_number']))
if BASE_CONFIG_RE.search(line):
raise Exception("Found {} at {}:{} ({}) which isn't expected in {} files".format(BASE_CONFIG_NAME, file_path, linenum, line, filename if filename == 'CMakeLists.txt' else file_ext))
elif BASE_BUILD_DEFINE_RE.search(line):
m = BUILD_DEFINE_RE.match(line)
if not m:
if line.startswith("## "):
logger.info("Possible misformatted {} at {}:{} ({})".format(BASE_BUILD_DEFINE_NAME, file_path, linenum, line))
else:
raise Exception("Found misformatted {} at {}:{} ({})".format(BASE_BUILD_DEFINE_NAME, file_path, linenum, line))
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum}
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
all_descriptions = chips_all_descriptions[applicable]
if config_description in all_descriptions:
raise Exception("Found description {} at {}:{} but it was already used at {}:{}".format(config_description, file_path, linenum, os.path.join(scandir, all_descriptions[config_description]['filename']), all_descriptions[config_description]['line_number']))
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum}
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
#print(file_path, config_name, config_attrs)
if config_name in all_configs:
raise Exception("Found {} at {}:{} but it was already declared at {}:{}".format(config_name, file_path, linenum, os.path.join(scandir, all_configs[config_name]['filename']), all_configs[config_name]['line_number']))
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum, 'description': config_description}
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
#print(file_path, config_name, config_attrs)
all_configs = chips_all_configs[applicable]
if config_name in all_configs:
raise Exception("Found {} at {}:{} but it was already declared at {}:{}".format(config_name, file_path, linenum, os.path.join(scandir, all_configs[config_name]['filename']), all_configs[config_name]['line_number']))
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum, 'description': config_description}
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
for applicable, all_configs in chips_all_configs.items():
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
ValidateAttrs(config_obj['attrs'], file_path, linenum)
ValidateAttrs(config_obj['attrs'], file_path, linenum)
# All settings in "host" should also be in "all"
for config_name, config_obj in chips_all_configs["host"].items():
if config_name not in chips_all_configs["all"]:
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
raise Exception("Found 'host' config {} at {}:{}, but no matching non-host config found".format(config_name, file_path, linenum))
# Any chip-specific settings should not be in "all"
for chip in CHIP_NAMES:
for config_name, chip_config_obj in chips_all_configs[chip].items():
if config_name in chips_all_configs["all"]:
all_config_obj = chips_all_configs["all"][config_name]
chip_file_path = os.path.join(scandir, chip_config_obj['filename'])
chip_linenum = chip_config_obj['line_number']
all_file_path = os.path.join(scandir, all_config_obj['filename'])
all_linenum = all_config_obj['line_number']
raise Exception("'{}' config {} at {}:{} also found at {}:{}".format(chip, config_name, chip_file_path, chip_linenum, all_file_path, all_linenum))
def build_mismatch_exception_message(name, thing, config_obj1, value1, config_obj2, value2):
obj1_filepath = os.path.join(scandir, config_obj1['filename'])
obj2_filepath = os.path.join(scandir, config_obj2['filename'])
return "'{}' {} mismatch at {}:{} ({}) and {}:{} ({})".format(name, thing, obj1_filepath, config_obj1['line_number'], value1, obj2_filepath, config_obj2['line_number'], value2)
# Check that any identically-named setttings have appropriate matching attributes
for applicable in chips_all_configs:
for other in chips_all_configs:
if other == applicable:
continue
for config_name, applicable_config_obj in chips_all_configs[applicable].items():
if config_name in chips_all_configs[other]:
other_config_obj = chips_all_configs[other][config_name]
# Check that fields match
for field in ['description']:
applicable_value = applicable_config_obj[field]
other_value = other_config_obj[field]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, field, applicable_config_obj, applicable_value, other_config_obj, other_value))
# Check that attributes match
for attr in applicable_config_obj['attrs']:
if attr != 'default': # totally fine for defaults to vary per-platform
applicable_value = applicable_config_obj['attrs'][attr]
other_value = other_config_obj['attrs'][attr]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, "attribute '{}'".format(attr), applicable_config_obj, applicable_value, other_config_obj, other_value))
# Sort the output alphabetically by name and then by chip
output_rows = set()
for chip in (*CHIP_NAMES, "host", "all"):
if chip in chips_all_configs:
all_configs = chips_all_configs[chip]
for config_name in all_configs:
output_rows.add((config_name, chip))
with open(outfile, 'w', newline='') as csvfile:
fieldnames = ('name', 'location', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
fieldnames = ('name', 'location', 'platform', 'chip', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore', dialect='excel-tab')
writer.writeheader()
for config_name, config_obj in sorted(all_configs.items()):
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'description': config_obj['description'], **config_obj['attrs']})
for config_name, chip in sorted(output_rows):
config_obj = chips_all_configs[chip][config_name]
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'platform': "host" if chip == "host" else "rp2", 'chip': chip if chip in CHIP_NAMES else "all", 'description': config_obj['description'], **config_obj['attrs']})
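
A note on how the 'applicable' bucket above is chosen: it is inferred purely from the directory path of the scanned file, and then mapped onto the new 'platform'/'chip' TSV columns when rows are written. A self-contained sketch of that classification (the helper function and the sample paths are illustrative, not part of the script):

CHIP_NAMES = ["rp2040", "rp2350"]

def classify(dirpath):
    # Mirrors the scripts' path check: a config found under a directory named
    # after a chip (or under 'host') only applies there; everything else
    # applies to all chips.
    applicable = "all"
    for chip in (*CHIP_NAMES, "host"):
        if "/{}/".format(chip) in dirpath:
            applicable = chip
            break
    # The values written to the new TSV columns for that bucket
    platform = "host" if applicable == "host" else "rp2"
    chip_col = applicable if applicable in CHIP_NAMES else "all"
    return applicable, platform, chip_col

# Illustrative paths, not taken from the diff
print(classify("src/rp2350/hardware_regs/include/"))  # ('rp2350', 'rp2', 'rp2350')
print(classify("src/host/pico_platform/include/"))    # ('host', 'host', 'all')
print(classify("src/rp2_common/hardware_uart/"))      # ('all', 'rp2', 'all')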

View File

@ -7,7 +7,7 @@
#
# Script to scan the Raspberry Pi Pico SDK tree searching for CMake configuration items
# Outputs a tab separated file of the configuration item:
# name location description type advanced default group
# name location platform chip description type advanced default docref group
#
# Usage:
#
@ -22,17 +22,28 @@ import re
import csv
import logging
from collections import defaultdict
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
scandir = sys.argv[1]
outfile = sys.argv[2] if len(sys.argv) > 2 else 'pico_cmake_configs.tsv'
CMAKE_CONFIG_RE = re.compile(r'#\s+PICO_CMAKE_CONFIG:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$')
BASE_CONFIG_NAME = 'PICO_CONFIG'
BASE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CONFIG_NAME))
BASE_CMAKE_CONFIG_NAME = 'PICO_CMAKE_CONFIG'
BASE_CMAKE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CMAKE_CONFIG_NAME))
BASE_BUILD_DEFINE_NAME = 'PICO_BUILD_DEFINE'
BASE_BUILD_DEFINE_RE = re.compile(r'\b{}\b'.format(BASE_BUILD_DEFINE_NAME))
all_configs = {}
CMAKE_CONFIG_RE = re.compile(r'#\s+{}:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$'.format(BASE_CMAKE_CONFIG_NAME))
CHIP_NAMES = ["rp2040", "rp2350"]
chips_all_configs = defaultdict(dict)
all_attrs = set()
all_descriptions = {}
chips_all_descriptions = defaultdict(dict)
@ -94,7 +105,7 @@ def ValidateAttrs(config_attrs, file_path, linenum):
assert 'max' not in config_attrs
_default = config_attrs.get('default', None)
else:
raise Exception("Found unknown PICO_CMAKE_CONFIG type {} at {}:{}".format(_type, file_path, linenum))
raise Exception("Found unknown {} type {} at {}:{}".format(BASE_CMAKE_CONFIG_NAME, _type, file_path, linenum))
@ -106,72 +117,133 @@ for dirpath, dirnames, filenames in os.walk(scandir):
file_ext = os.path.splitext(filename)[1]
if filename == 'CMakeLists.txt' or file_ext == '.cmake':
file_path = os.path.join(dirpath, filename)
applicable = "all"
for chip in (*CHIP_NAMES, "host"):
if "/{}/".format(chip) in dirpath:
applicable = chip
break
with open(file_path, encoding="ISO-8859-1") as fh:
linenum = 0
for line in fh.readlines():
linenum += 1
line = line.strip()
m = CMAKE_CONFIG_RE.match(line)
if m:
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
if BASE_CONFIG_RE.search(line):
raise Exception("Found {} at {}:{} ({}) which isn't expected in {} files".format(BASE_CONFIG_NAME, file_path, linenum, line, filename if filename == 'CMakeLists.txt' else file_ext))
elif BASE_CMAKE_CONFIG_RE.search(line):
m = CMAKE_CONFIG_RE.match(line)
if not m:
if line.startswith("## "):
logger.info("Possible misformatted {} at {}:{} ({})".format(BASE_CMAKE_CONFIG_NAME, file_path, linenum, line))
else:
raise Exception("Found misformatted {} at {}:{} ({})".format(BASE_CMAKE_CONFIG_NAME, file_path, linenum, line))
else:
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
if config_description in all_descriptions:
# relax check for the same header/variable in a different tree
if config_name != all_descriptions[config_description]['config_name'] or filename != all_descriptions[config_description]['filename_only']:
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
all_descriptions = chips_all_descriptions[applicable]
if config_description in all_descriptions:
raise Exception("Found description {} at {}:{} but it was already used at {}:{}".format(config_description, file_path, linenum, os.path.join(scandir, all_descriptions[config_description]['filename']), all_descriptions[config_description]['line_number']))
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'filename_only':filename, 'line_number': linenum}
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum}
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
#print(file_path, config_name, config_attrs)
if config_name in all_configs:
# relax check for the same header/variable in a different tree
if filename != all_configs[config_name]['filename_only']:
#print(file_path, config_name, config_attrs)
all_configs = chips_all_configs[applicable]
if config_name in all_configs:
raise Exception("Found {} at {}:{} but it was already declared at {}:{}".format(config_name, file_path, linenum, os.path.join(scandir, all_configs[config_name]['filename']), all_configs[config_name]['line_number']))
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'filename_only' : filename, 'line_number': linenum, 'description': config_description}
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum, 'description': config_description}
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
for applicable, all_configs in chips_all_configs.items():
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
ValidateAttrs(config_obj['attrs'], file_path, linenum)
ValidateAttrs(config_obj['attrs'], file_path, linenum)
# All settings in "host" should also be in "all"
for config_name, config_obj in chips_all_configs["host"].items():
if config_name not in chips_all_configs["all"]:
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
raise Exception("Found 'host' config {} at {}:{}, but no matching non-host config found".format(config_name, file_path, linenum))
# Any chip-specific settings should not be in "all"
for chip in CHIP_NAMES:
for config_name, chip_config_obj in chips_all_configs[chip].items():
if config_name in chips_all_configs["all"]:
all_config_obj = chips_all_configs["all"][config_name]
chip_file_path = os.path.join(scandir, chip_config_obj['filename'])
chip_linenum = chip_config_obj['line_number']
all_file_path = os.path.join(scandir, all_config_obj['filename'])
all_linenum = all_config_obj['line_number']
raise Exception("'{}' config {} at {}:{} also found at {}:{}".format(chip, config_name, chip_file_path, chip_linenum, all_file_path, all_linenum))
def build_mismatch_exception_message(name, thing, config_obj1, value1, config_obj2, value2):
obj1_filepath = os.path.join(scandir, config_obj1['filename'])
obj2_filepath = os.path.join(scandir, config_obj2['filename'])
return "'{}' {} mismatch at {}:{} ({}) and {}:{} ({})".format(name, thing, obj1_filepath, config_obj1['line_number'], value1, obj2_filepath, config_obj2['line_number'], value2)
# Check that any identically-named setttings have appropriate matching attributes
for applicable in chips_all_configs:
for other in chips_all_configs:
if other == applicable:
continue
for config_name, applicable_config_obj in chips_all_configs[applicable].items():
if config_name in chips_all_configs[other]:
other_config_obj = chips_all_configs[other][config_name]
# Check that fields match
for field in ['description']:
applicable_value = applicable_config_obj[field]
other_value = other_config_obj[field]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, field, applicable_config_obj, applicable_value, other_config_obj, other_value))
# Check that attributes match
for attr in applicable_config_obj['attrs']:
if attr != 'default': # totally fine for defaults to vary per-platform
applicable_value = applicable_config_obj['attrs'][attr]
other_value = other_config_obj['attrs'][attr]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, "attribute '{}'".format(attr), applicable_config_obj, applicable_value, other_config_obj, other_value))
# Sort the output alphabetically by name and then by chip
output_rows = set()
for chip in (*CHIP_NAMES, "host", "all"):
if chip in chips_all_configs:
all_configs = chips_all_configs[chip]
for config_name in all_configs:
output_rows.add((config_name, chip))
with open(outfile, 'w', newline='') as csvfile:
fieldnames = ('name', 'location', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
fieldnames = ('name', 'location', 'platform', 'chip', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore', dialect='excel-tab')
writer.writeheader()
for config_name, config_obj in sorted(all_configs.items()):
# kinda ugly, but good enough for now without messing with TSV
if 'docref' in config_obj['attrs']:
desc = "{} (see <<{}>>)".format(config_obj['description'], config_obj['attrs']['docref'])
else:
desc = config_obj['description']
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'description': desc, **config_obj['attrs']})
for config_name, chip in sorted(output_rows):
config_obj = chips_all_configs[chip][config_name]
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'platform': "host" if chip == "host" else "rp2", 'chip': chip if chip in CHIP_NAMES else "all", 'description': config_obj['description'], **config_obj['attrs']})
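
For context, the annotation grammar parsed by these scripts is unchanged: a config name, a free-text description, then comma-separated key=value attributes, with commas inside parentheses protected by temporarily swapping them for NUL characters. A self-contained sketch of that parse, reusing the same regex shape on a single made-up PICO_CMAKE_CONFIG line:

import re

BASE_CMAKE_CONFIG_NAME = 'PICO_CMAKE_CONFIG'
CMAKE_CONFIG_RE = re.compile(r'#\s+{}:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$'.format(BASE_CMAKE_CONFIG_NAME))

# Made-up annotation line in the documented format
line = "# PICO_CMAKE_CONFIG: PICO_EXAMPLE_OPT, Example option, type=string, default=(a, b), group=build"

m = CMAKE_CONFIG_RE.match(line)
config_name, config_description, _attrs = m.group(1), m.group(2), m.group(3)

# Protect commas inside brackets so the attribute split below keeps them intact
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)

config_attrs = {}
prev = None
for item in _attrs.split(','):
    if "=" not in item:       # continuation of the previous attribute's value
        item = prev + "," + item
    k, v = (i.strip() for i in item.split('='))
    config_attrs[k] = v.replace('\0', ',')
    prev = item

print(config_name)         # PICO_EXAMPLE_OPT
print(config_description)  # Example option
print(config_attrs)        # {'type': 'string', 'default': '(a, b)', 'group': 'build'}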

View File

@ -7,11 +7,11 @@
#
# Script to scan the Raspberry Pi Pico SDK tree searching for configuration items
# Outputs a tab separated file of the configuration item:
# name location description type advanced default depends enumvalues group max min
# name location platform chip description type advanced default depends enumvalues group max min
#
# Usage:
#
# tools/extract_configs.py <root of source tree> [output file]
# tools/extract_configs.py <root of repo> [output file]
#
# If not specified, output file will be `pico_configs.tsv`
@ -22,19 +22,30 @@ import re
import csv
import logging
from collections import defaultdict
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
scandir = sys.argv[1]
outfile = sys.argv[2] if len(sys.argv) > 2 else 'pico_configs.tsv'
CONFIG_RE = re.compile(r'//\s+PICO_CONFIG:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$')
BASE_CONFIG_NAME = 'PICO_CONFIG'
BASE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CONFIG_NAME))
BASE_CMAKE_CONFIG_NAME = 'PICO_CMAKE_CONFIG'
BASE_CMAKE_CONFIG_RE = re.compile(r'\b{}\b'.format(BASE_CMAKE_CONFIG_NAME))
BASE_BUILD_DEFINE_NAME = 'PICO_BUILD_DEFINE'
BASE_BUILD_DEFINE_RE = re.compile(r'\b{}\b'.format(BASE_BUILD_DEFINE_NAME))
CONFIG_RE = re.compile(r'//\s+{}:\s+(\w+),\s+([^,]+)(?:,\s+(.*))?$'.format(BASE_CONFIG_NAME))
DEFINE_RE = re.compile(r'#define\s+(\w+)\s+(.+?)(\s*///.*)?$')
all_configs = {}
CHIP_NAMES = ["rp2040", "rp2350"]
chips_all_configs = defaultdict(dict)
all_attrs = set()
all_descriptions = {}
all_defines = {}
chips_all_descriptions = defaultdict(dict)
chips_all_defines = defaultdict(dict)
@ -105,7 +116,7 @@ def ValidateAttrs(config_attrs, file_path, linenum):
if _default not in _enumvalues:
raise Exception('{} at {}:{} has default value {} which isn\'t in list of enumvalues {}'.format(config_name, file_path, linenum, config_attrs['default'], config_attrs['enumvalues']))
else:
raise Exception("Found unknown PICO_CONFIG type {} at {}:{}".format(_type, file_path, linenum))
raise Exception("Found unknown {} type {} at {}:{}".format(BASE_CONFIG_NAME, _type, file_path, linenum))
@ -117,55 +128,68 @@ for dirpath, dirnames, filenames in os.walk(scandir):
file_ext = os.path.splitext(filename)[1]
if file_ext in ('.c', '.h'):
file_path = os.path.join(dirpath, filename)
applicable = "all"
for chip in (*CHIP_NAMES, "host"):
if "/{}/".format(chip) in dirpath:
applicable = chip
break
with open(file_path, encoding="ISO-8859-1") as fh:
linenum = 0
for line in fh.readlines():
linenum += 1
line = line.strip()
m = CONFIG_RE.match(line)
if m:
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
if BASE_CMAKE_CONFIG_RE.search(line):
raise Exception("Found {} at {}:{} ({}) which isn't expected in {} files".format(BASE_CMAKE_CONFIG_NAME, file_path, linenum, line, file_ext))
elif BASE_BUILD_DEFINE_RE.search(line):
raise Exception("Found {} at {}:{} ({}) which isn't expected in {} files".format(BASE_BUILD_DEFINE_NAME, file_path, linenum, line, file_ext))
elif BASE_CONFIG_RE.search(line):
m = CONFIG_RE.match(line)
if not m:
if line.startswith("//// "):
logger.info("Possible misformatted {} at {}:{} ({})".format(BASE_CONFIG_NAME, file_path, linenum, line))
else:
raise Exception("Found misformatted {} at {}:{} ({})".format(BASE_CONFIG_NAME, file_path, linenum, line))
else:
config_name = m.group(1)
config_description = m.group(2)
_attrs = m.group(3)
# allow commas to appear inside brackets by converting them to and from NULL chars
_attrs = re.sub(r'(\(.+\))', lambda m: m.group(1).replace(',', '\0'), _attrs)
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
if config_description in all_descriptions:
# relax check for the same header/variable in a different tree
if config_name != all_descriptions[config_description]['config_name'] or filename != all_descriptions[config_description]['filename_only']:
if '=' in config_description:
raise Exception("For {} at {}:{} the description was set to '{}' - has the description field been omitted?".format(config_name, file_path, linenum, config_description))
all_descriptions = chips_all_descriptions[applicable]
if config_description in all_descriptions:
raise Exception("Found description {} at {}:{} but it was already used at {}:{}".format(config_description, file_path, linenum, os.path.join(scandir, all_descriptions[config_description]['filename']), all_descriptions[config_description]['line_number']))
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'filename_only':filename, 'line_number': linenum}
else:
all_descriptions[config_description] = {'config_name': config_name, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum}
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
config_attrs = {}
prev = None
# Handle case where attr value contains a comma
for item in _attrs.split(','):
if "=" not in item:
assert(prev)
item = prev + "," + item
try:
k, v = (i.strip() for i in item.split('='))
except ValueError:
raise Exception('{} at {}:{} has malformed value {}'.format(config_name, file_path, linenum, item))
config_attrs[k] = v.replace('\0', ',')
all_attrs.add(k)
prev = item
#print(file_path, config_name, config_attrs)
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
if 'group' not in config_attrs:
raise Exception('{} at {}:{} has no group attribute'.format(config_name, file_path, linenum))
#print(file_path, config_name, config_attrs)
if config_name in all_configs:
# relax check for the same header/variable in a different tree
if filename != all_configs[config_name]['filename_only']:
#print(file_path, config_name, config_attrs)
all_configs = chips_all_configs[applicable]
if config_name in all_configs:
raise Exception("Found {} at {}:{} but it was already declared at {}:{}".format(config_name, file_path, linenum, os.path.join(scandir, all_configs[config_name]['filename']), all_configs[config_name]['line_number']))
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'filename_only' : filename, 'line_number': linenum, 'description': config_description}
else:
all_configs[config_name] = {'attrs': config_attrs, 'filename': os.path.relpath(file_path, scandir), 'line_number': linenum, 'description': config_description}
else:
m = DEFINE_RE.match(line)
if m:
@ -180,6 +204,7 @@ for dirpath, dirnames, filenames in os.walk(scandir):
m = re.match(r'^_u\(((0x)?\d+)\)$', value.lower())
if m:
value = m.group(1)
all_defines = chips_all_defines[applicable]
if name not in all_defines:
all_defines[name] = dict()
if value not in all_defines[name]:
@ -187,40 +212,98 @@ for dirpath, dirnames, filenames in os.walk(scandir):
all_defines[name][value] = (file_path, linenum)
# Check for defines with missing PICO_CONFIG entries
resolved_defines = dict()
for d in all_defines:
if d not in all_configs and d.startswith("PICO_"):
logger.warning("Potential unmarked PICO define {}".format(d))
# resolve "nested defines" - this allows e.g. USB_DPRAM_MAX to resolve to USB_DPRAM_SIZE which is set to 4096 (which then matches the relevant PICO_CONFIG entry)
for val in all_defines[d]:
if val in all_defines:
resolved_defines[d] = all_defines[val]
chips_resolved_defines = defaultdict(dict)
for applicable, all_defines in chips_all_defines.items():
for d in all_defines:
if d not in all_configs and d.startswith("PICO_"):
logger.warning("Potential unmarked PICO define {}".format(d))
resolved_defines = chips_resolved_defines[applicable]
# resolve "nested defines" - this allows e.g. USB_DPRAM_MAX to resolve to USB_DPRAM_SIZE which is set to 4096 (which then matches the relevant PICO_CONFIG entry)
for val in all_defines[d]:
if val in all_defines:
resolved_defines[d] = all_defines[val]
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
for applicable, all_configs in chips_all_configs.items():
all_defines = chips_all_defines[applicable]
for config_name, config_obj in all_configs.items():
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
ValidateAttrs(config_obj['attrs'], file_path, linenum)
ValidateAttrs(config_obj['attrs'], file_path, linenum)
# Check that default values match up
if 'default' in config_obj['attrs']:
config_default = config_obj['attrs']['default']
if config_name in all_defines:
defines_obj = all_defines[config_name]
if config_default not in defines_obj and (config_name not in resolved_defines or config_default not in resolved_defines[config_name]):
if '/' in config_default or ' ' in config_default:
continue
# There _may_ be multiple matching defines, but arbitrarily display just one in the error message
first_define_value = list(defines_obj.keys())[0]
first_define_file_path, first_define_linenum = defines_obj[first_define_value]
raise Exception('Found {} at {}:{} with a default of {}, but #define says {} (at {}:{})'.format(config_name, file_path, linenum, config_default, first_define_value, first_define_file_path, first_define_linenum))
else:
raise Exception('Found {} at {}:{} with a default of {}, but no matching #define found'.format(config_name, file_path, linenum, config_default))
# Check that default values match up
if 'default' in config_obj['attrs']:
config_default = config_obj['attrs']['default']
if config_name in all_defines:
defines_obj = all_defines[config_name]
if config_default not in defines_obj and (config_name not in resolved_defines or config_default not in resolved_defines[config_name]):
if '/' in config_default or ' ' in config_default:
continue
# There _may_ be multiple matching defines, but arbitrarily display just one in the error message
first_define_value = list(defines_obj.keys())[0]
first_define_file_path, first_define_linenum = defines_obj[first_define_value]
raise Exception('Found {} at {}:{} with a default of {}, but #define says {} (at {}:{})'.format(config_name, file_path, linenum, config_default, first_define_value, first_define_file_path, first_define_linenum))
else:
raise Exception('Found {} at {}:{} with a default of {}, but no matching #define found'.format(config_name, file_path, linenum, config_default))
# All settings in "host" should also be in "all"
for config_name, config_obj in chips_all_configs["host"].items():
if config_name not in chips_all_configs["all"]:
file_path = os.path.join(scandir, config_obj['filename'])
linenum = config_obj['line_number']
raise Exception("Found 'host' config {} at {}:{}, but no matching non-host config found".format(config_name, file_path, linenum))
# Any chip-specific settings should not be in "all"
for chip in CHIP_NAMES:
for config_name, chip_config_obj in chips_all_configs[chip].items():
if config_name in chips_all_configs["all"]:
all_config_obj = chips_all_configs["all"][config_name]
chip_file_path = os.path.join(scandir, chip_config_obj['filename'])
chip_linenum = chip_config_obj['line_number']
all_file_path = os.path.join(scandir, all_config_obj['filename'])
all_linenum = all_config_obj['line_number']
raise Exception("'{}' config {} at {}:{} also found at {}:{}".format(chip, config_name, chip_file_path, chip_linenum, all_file_path, all_linenum))
def build_mismatch_exception_message(name, thing, config_obj1, value1, config_obj2, value2):
obj1_filepath = os.path.join(scandir, config_obj1['filename'])
obj2_filepath = os.path.join(scandir, config_obj2['filename'])
return "'{}' {} mismatch at {}:{} ({}) and {}:{} ({})".format(name, thing, obj1_filepath, config_obj1['line_number'], value1, obj2_filepath, config_obj2['line_number'], value2)
# Check that any identically-named setttings have appropriate matching attributes
for applicable in chips_all_configs:
for other in chips_all_configs:
if other == applicable:
continue
for config_name, applicable_config_obj in chips_all_configs[applicable].items():
if config_name in chips_all_configs[other]:
other_config_obj = chips_all_configs[other][config_name]
# Check that fields match
for field in ['description']:
applicable_value = applicable_config_obj[field]
other_value = other_config_obj[field]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, field, applicable_config_obj, applicable_value, other_config_obj, other_value))
# Check that attributes match
for attr in applicable_config_obj['attrs']:
if attr != 'default': # totally fine for defaults to vary per-platform
applicable_value = applicable_config_obj['attrs'][attr]
other_value = other_config_obj['attrs'][attr]
if applicable_value != other_value:
raise Exception(build_mismatch_exception_message(config_name, "attribute '{}'".format(attr), applicable_config_obj, applicable_value, other_config_obj, other_value))
# Sort the output alphabetically by name and then by chip
output_rows = set()
for chip in (*CHIP_NAMES, "host", "all"):
if chip in chips_all_configs:
all_configs = chips_all_configs[chip]
for config_name in all_configs:
output_rows.add((config_name, chip))
with open(outfile, 'w', newline='') as csvfile:
fieldnames = ('name', 'location', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
fieldnames = ('name', 'location', 'platform', 'chip', 'description', 'type') + tuple(sorted(all_attrs - set(['type'])))
writer = csv.DictWriter(csvfile, fieldnames=fieldnames, extrasaction='ignore', dialect='excel-tab')
writer.writeheader()
for config_name, config_obj in sorted(all_configs.items()):
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'description': config_obj['description'], **config_obj['attrs']})
for config_name, chip in sorted(output_rows):
config_obj = chips_all_configs[chip][config_name]
writer.writerow({'name': config_name, 'location': '/{}:{}'.format(config_obj['filename'], config_obj['line_number']), 'platform': "host" if chip == "host" else "rp2", 'chip': chip if chip in CHIP_NAMES else "all", 'description': config_obj['description'], **config_obj['attrs']})
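
To round off, the new cross-bucket consistency rule: a config declared for more than one chip (or for both 'host' and 'all') must agree on its description and attributes, with only 'default' allowed to differ per chip. A toy reproduction of that check, simplified from the loops added to all three scripts; the config data here is invented so that the check visibly fires and raises:

from collections import defaultdict

chips_all_configs = defaultdict(dict)
# Invented data: the same config declared for both chips, with a per-chip
# default (which is allowed) but a mismatched 'type' (which is not).
chips_all_configs["rp2040"]["PICO_EXAMPLE_OPT"] = {
    'description': 'Example option',
    'attrs': {'type': 'int', 'default': '0', 'group': 'build'}}
chips_all_configs["rp2350"]["PICO_EXAMPLE_OPT"] = {
    'description': 'Example option',
    'attrs': {'type': 'bool', 'default': '1', 'group': 'build'}}

for applicable in chips_all_configs:
    for other in chips_all_configs:
        if other == applicable:
            continue
        for name, cfg in chips_all_configs[applicable].items():
            if name not in chips_all_configs[other]:
                continue
            other_cfg = chips_all_configs[other][name]
            if cfg['description'] != other_cfg['description']:
                raise Exception("'{}' description mismatch".format(name))
            for attr, value in cfg['attrs'].items():
                if attr == 'default':   # defaults may legitimately differ per chip
                    continue
                if value != other_cfg['attrs'][attr]:
                    raise Exception("'{}' attribute '{}' mismatch: {} vs {}".format(
                        name, attr, value, other_cfg['attrs'][attr]))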