Mirror of https://github.com/hathach/tinyusb.git
More ci tweak (#2636)
* change concurrency group to ${{ github.workflow }}-${{ github.ref }}
* use argparse for build.py and hil_test.py, removing the need to install click
* move CI for Windows/macOS to build_cmake.yml
* rename build_family.yml to build_util.yml
* build_util.yml supports esp32
* integrate build-espressif into build.yml
* build.py supports make with the --board option
* add get_deps action
* update HIL test to reuse actions

Parent: bf9cf107c6
Commit: 3e2ea77506
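For orientation, the diffs below convert the tooling from click to argparse and route CI through a reusable workflow plus a get_deps composite action. A minimal sketch of how the updated tools are invoked from a checkout, mirroring the workflow steps in this commit (the board and family names are only examples):

# Sketch: invoking the updated tools the way the workflows in this commit do.
# Assumes a TinyUSB checkout as the working directory; board/family names are examples.
import subprocess

# fetch dependencies for a family or board (this is what the get_deps action runs)
subprocess.run(["python3", "tools/get_deps.py", "stm32f4"], check=True)

# cmake build of one family, with the clang option that setup_toolchain may add
subprocess.run(["python", "tools/build.py", "-s", "cmake", "--toolchain", "clang", "stm32f4"], check=True)

# make build of a single board (the new -b/--board path used by the make-os job)
subprocess.run(["python", "tools/build.py", "-s", "make", "-b", "stm32f411disco"], check=True)

# HIL test runner: repeat -b for several boards, the config file is positional
subprocess.run(["python3", "test/hil/hil_test.py", "-b", "raspberry_pi_pico", "pi4.json"], check=True)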
@@ -1,30 +1,29 @@
-name: Prepare to build
+name: Get dependencies

 inputs:
-  family:
+  arg:
     required: true
     type: string

 runs:
   using: "composite"
   steps:
     - name: Setup Python
       uses: actions/setup-python@v5
       with:
         python-version: '3.x'

     - name: Checkout pico-sdk for rp2040
-      if: contains(inputs.family, 'rp2040')
+      if: contains(inputs.arg, 'rp2040') || contains(inputs.arg, 'raspberry_pi_pico')
       uses: actions/checkout@v4
       with:
         repository: raspberrypi/pico-sdk
         ref: develop
         path: pico-sdk

-    - name: Get Dependencies
+    - name: Linux dependencies
+      if: runner.os == 'Linux'
       run: |
         sudo apt install -y ninja-build
-        pip install click
-        python3 tools/get_deps.py ${{ inputs.family }}
-        echo >> $GITHUB_ENV "PICO_SDK_PATH=$GITHUB_WORKSPACE/pico-sdk"
       shell: bash
+
+    - name: Get Dependencies
+      run: |
+        python3 tools/get_deps.py ${{ inputs.arg }}
+        echo "PICO_SDK_PATH=${{ github.workspace }}/pico-sdk" >> $GITHUB_ENV
+      shell: bash
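Both the old and the new echo forms append a KEY=VALUE line to the file that $GITHUB_ENV points at; subsequent steps then see it as an environment variable. The new form simply uses the conventional ordering. A Python sketch of the same mechanism, illustrative only and not part of this commit:

# Sketch: what appending to $GITHUB_ENV does. The Actions runner sets GITHUB_ENV to a
# file path; any KEY=VALUE lines appended there become environment variables for
# later steps in the same job.
import os

def export_for_later_steps(key: str, value: str) -> None:
    env_file = os.environ.get("GITHUB_ENV")  # set by the Actions runner
    if env_file:  # only meaningful inside a workflow run
        with open(env_file, "a") as f:
            f.write(f"{key}={value}\n")

# equivalent of: echo "PICO_SDK_PATH=${{ github.workspace }}/pico-sdk" >> $GITHUB_ENV
export_for_later_steps("PICO_SDK_PATH",
                       os.path.join(os.environ.get("GITHUB_WORKSPACE", "."), "pico-sdk"))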

.github/actions/setup_toolchain/action.yml (18 changes)
@@ -8,6 +8,11 @@ inputs:
     required: false
     type: string

+outputs:
+  build_option:
+    description: 'Build option for the toolchain e.g --toolchain clang'
+    value: ${{ steps.set-toolchain-option.outputs.build_option }}
+
 runs:
   using: "composite"
   steps:
@@ -19,7 +24,7 @@ runs:

     - name: Pull ESP-IDF docker
       if: inputs.toolchain == 'esp-idf'
-      run: docker pull espressif/idf:latest
+      run: docker pull espressif/idf:${{ inputs.toolchain_url }}
       shell: bash

     - name: Download Toolchain
@@ -29,3 +34,14 @@ runs:
       uses: ./.github/actions/setup_toolchain/download
       with:
         toolchain_url: ${{ inputs.toolchain_url }}
+
+    - name: Set toolchain option
+      id: set-toolchain-option
+      run: |
+        BUILD_OPTION=""
+        if [[ "${{ inputs.toolchain }}" == *"clang"* ]]; then
+          BUILD_OPTION="--toolchain clang"
+        fi
+        echo "build_option=$BUILD_OPTION"
+        echo "build_option=$BUILD_OPTION" >> $GITHUB_OUTPUT
+      shell: bash
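The new build_option output saves callers from repeating the clang check: the step writes a name=value line into the file that $GITHUB_OUTPUT points at, and build_util.yml later splices the value into the build.py command line. A rough Python mirror of that logic, for illustration only:

# Sketch: Python mirror of the "Set toolchain option" step above. The value written to
# $GITHUB_OUTPUT surfaces as steps.set-toolchain-option.outputs.build_option for callers.
import os

def set_toolchain_option(toolchain: str) -> str:
    build_option = "--toolchain clang" if "clang" in toolchain else ""
    out_file = os.environ.get("GITHUB_OUTPUT")  # set by the Actions runner
    if out_file:
        with open(out_file, "a") as f:
            f.write(f"build_option={build_option}\n")
    return build_option

assert set_toolchain_option("arm-clang") == "--toolchain clang"
assert set_toolchain_option("arm-gcc") == ""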

.github/workflows/build_cmake.yml (46 changes)
@@ -12,6 +12,7 @@ on:
       - 'tools/build.py'
       - '.github/actions/**'
       - '.github/workflows/build_cmake.yml'
+      - '.github/workflows/build_util.yml'
       - '.github/workflows/ci_set_matrix.py'
   pull_request:
     branches: [ master ]
@@ -24,9 +25,10 @@ on:
       - 'tools/build.py'
       - '.github/actions/**'
       - '.github/workflows/build_cmake.yml'
+      - '.github/workflows/build_util.yml'
       - '.github/workflows/ci_set_matrix.py'
 concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
@@ -55,7 +57,7 @@
   # ---------------------------------------
   cmake:
     needs: set-matrix
-    uses: ./.github/workflows/build_family.yml
+    uses: ./.github/workflows/build_util.yml
    strategy:
      fail-fast: false
      matrix:
@@ -69,14 +71,14 @@
       build-system: 'cmake'
       toolchain: ${{ matrix.toolchain }}
       toolchain_url: ${{ fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].toolchain_url }}
-      build-family: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].family) }}
+      build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].family) }}

   # ---------------------------------------
   # Build Make
   # ---------------------------------------
   make:
     needs: set-matrix
-    uses: ./.github/workflows/build_family.yml
+    uses: ./.github/workflows/build_util.yml
     strategy:
       fail-fast: false
       matrix:
@@ -90,4 +92,38 @@ jobs:
       build-system: 'make'
       toolchain: ${{ matrix.toolchain }}
       toolchain_url: ${{ fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].toolchain_url }}
-      build-family: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].family) }}
+      build-args: ${{ toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].family) }}
+
+  # ---------------------------------------
+  # Build Make on Windows/MacOS
+  # ---------------------------------------
+  make-os:
+    uses: ./.github/workflows/build_util.yml
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [windows-latest, macos-latest]
+    with:
+      os: ${{ matrix.os }}
+      build-system: 'make'
+      toolchain: 'arm-gcc'
+      build-args: '["-bstm32f411disco"]'
+
+  # ---------------------------------------
+  # Build Espressif
+  # ---------------------------------------
+  espressif:
+    uses: ./.github/workflows/build_util.yml
+    strategy:
+      fail-fast: false
+      matrix:
+        board:
+          # ESP32-S2
+          - 'espressif_kaluga_1'
+          # ESP32-S3 skip since devkitm is also compiled in hil-test workflow
+          #- 'espressif_s3_devkitm'
+    with:
+      build-system: 'cmake'
+      toolchain: 'esp-idf'
+      toolchain_url: 'v5.1.1'
+      build-args: '["-b${{ matrix.board }}"]'
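The cmake and make jobs now feed the reusable workflow through build-args, using toJSON(fromJSON(...)) to pluck the per-toolchain family list out of the set-matrix JSON. In Python terms the expression is just parse, index, re-serialize; the JSON shape used below is an assumption for illustration:

# Sketch: Python equivalent of
#   toJSON(fromJSON(needs.set-matrix.outputs.json)[matrix.toolchain].family)
# The exact set-matrix JSON shape is assumed here for illustration.
import json

matrix_json = '{"arm-gcc": {"toolchain_url": "...", "family": ["stm32f4", "rp2040"]}}'
toolchain = "arm-gcc"

build_args = json.dumps(json.loads(matrix_json)[toolchain]["family"])
print(build_args)  # '["stm32f4", "rp2040"]' -> becomes the build-args input of build_util.yml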

.github/workflows/build_esp.yml (deleted, 109 lines)
@@ -1,109 +0,0 @@
-name: Build ESP
-
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'src/**'
-      - 'examples/**'
-      - 'lib/**'
-      - 'hw/**'
-      - 'test/hil/**'
-      - '.github/workflows/build_esp.yml'
-  pull_request:
-    branches: [ master ]
-    paths:
-      - 'src/**'
-      - 'examples/**'
-      - 'lib/**'
-      - 'hw/**'
-      - 'test/hil/**'
-      - '.github/workflows/build_esp.yml'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  build-esp:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        board:
-          # ESP32-S2
-          - 'espressif_kaluga_1'
-          # ESP32-S3
-          - 'espressif_s3_devkitm'
-    steps:
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-
-      - name: Checkout TinyUSB
-        uses: actions/checkout@v4
-
-      - name: Setup Toolchain
-        uses: ./.github/actions/setup_toolchain
-        with:
-          toolchain: 'esp-idf'
-
-      - name: Build
-        run: docker run --rm -v $PWD:/project -w /project espressif/idf:v5.1.1 python3 tools/build.py -b ${{ matrix.board }}
-
-      - name: Upload Artifacts for Hardware Testing
-        if: matrix.board == 'espressif_s3_devkitm' && github.repository_owner == 'hathach'
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ matrix.board }}
-          path: |
-            cmake-build/cmake-build-${{ matrix.board }}/*/*/bootloader/bootloader.bin
-            cmake-build/cmake-build-${{ matrix.board }}/*/*/*.bin
-            cmake-build/cmake-build-${{ matrix.board }}/*/*/partition_table/partition-table.bin
-            cmake-build/cmake-build-${{ matrix.board }}/*/*/config.env
-            cmake-build/cmake-build-${{ matrix.board }}/*/*/flash_args
-
-  # ---------------------------------------
-  # Hardware in the loop (HIL)
-  # Current self-hosted instance is running on an RPI4. For attached hardware checkout hil_pi4.json
-  # ---------------------------------------
-  hil-test:
-    # run only with hathach's commit due to limited resource on RPI4
-    if: github.repository_owner == 'hathach'
-    needs: build-esp
-    runs-on: [self-hosted, esp32s3, hardware-in-the-loop]
-    strategy:
-      fail-fast: false
-      matrix:
-        board:
-          - 'espressif_s3_devkitm'
-    steps:
-      - name: Clean workspace
-        run: |
-          echo "Cleaning up previous run"
-          rm -rf "${{ github.workspace }}"
-          mkdir -p "${{ github.workspace }}"
-
-      # USB bus on rpi4 is not stable, reset it before testing
-      - name: Reset USB bus
-        run: |
-          lsusb
-          lsusb -t
-          # reset VIA Labs 2.0 hub
-          sudo usbreset 001/002
-
-      - name: Checkout test/hil
-        uses: actions/checkout@v4
-        with:
-          sparse-checkout: test/hil
-
-      - name: Download Artifacts
-        uses: actions/download-artifact@v4
-        with:
-          name: ${{ matrix.board }}
-          path: cmake-build/cmake-build-${{ matrix.board }}
-
-      - name: Test on actual hardware
-        run: |
-          python3 test/hil/hil_test.py --board ${{ matrix.board }} pi4_esp32.json

.github/workflows/build_family.yml (deleted, 64 lines)
@@ -1,64 +0,0 @@
-name: Build family
-
-on:
-  workflow_call:
-    inputs:
-      build-system:
-        required: true
-        type: string
-      toolchain:
-        required: true
-        type: string
-      toolchain_url:
-        required: true
-        type: string
-      build-family:
-        required: true
-        type: string
-
-jobs:
-  family:
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-      matrix:
-        family: ${{ fromJSON(inputs.build-family) }}
-    steps:
-      - name: Checkout TinyUSB
-        uses: actions/checkout@v4
-
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-
-      - name: Setup Toolchain
-        uses: ./.github/actions/setup_toolchain
-        with:
-          toolchain: ${{ inputs.toolchain }}
-          toolchain_url: ${{ inputs.toolchain_url }}
-
-      - name: Checkout pico-sdk for rp2040
-        if: contains(matrix.family, 'rp2040')
-        uses: actions/checkout@v4
-        with:
-          repository: raspberrypi/pico-sdk
-          ref: develop
-          path: pico-sdk
-
-      - name: Get Dependencies
-        run: |
-          sudo apt install -y ninja-build
-          pip install click
-          python3 tools/get_deps.py ${{ matrix.family }}
-
-      - name: Build
-        run: |
-          OPTION=""
-          if [[ "${{ inputs.toolchain }}" == *"clang"* ]]; then
-            OPTION="--toolchain clang"
-          fi
-          echo "OPTION=$OPTION"
-          python tools/build.py -s ${{ inputs.build-system }} $OPTION ${{ matrix.family }}
-        env:
-          PICO_SDK_PATH: ${{ github.workspace }}/pico-sdk

.github/workflows/build_iar.yml (2 changes)
@@ -23,7 +23,7 @@ on:
       - '.github/workflows/build_iar.yml'

 concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:

.github/workflows/build_renesas.yml (3 changes)
@@ -21,7 +21,7 @@ on:
       - '.github/workflows/build_renesas.yml'

 concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
@@ -65,7 +65,6 @@ jobs:

       - name: Get Dependencies
         run: |
-          pip install click
           python3 tools/get_deps.py ${{ matrix.family }}

       - name: Build

.github/workflows/build_util.yml (new file, 59 lines)
@@ -0,0 +1,59 @@
+name: Reusable build util
+
+on:
+  workflow_call:
+    inputs:
+      build-system:
+        required: true
+        type: string
+      toolchain:
+        required: true
+        type: string
+      toolchain_url:
+        required: false
+        type: string
+      build-args:
+        required: true
+        type: string
+      os:
+        required: false
+        type: string
+        default: 'ubuntu-latest'
+
+jobs:
+  family:
+    runs-on: ${{ inputs.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        arg: ${{ fromJSON(inputs.build-args) }}
+    steps:
+      - name: Checkout TinyUSB
+        uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.x'
+
+      - name: Setup Toolchain
+        id: setup-toolchain
+        uses: ./.github/actions/setup_toolchain
+        with:
+          toolchain: ${{ inputs.toolchain }}
+          toolchain_url: ${{ inputs.toolchain_url }}
+
+      - name: Get Dependencies
+        uses: ./.github/actions/get_deps
+        with:
+          arg: ${{ matrix.arg }}
+
+      - name: Build
+        if: inputs.toolchain != 'esp-idf'
+        run: |
+          python tools/build.py -s ${{ inputs.build-system }} ${{ steps.setup-toolchain.outputs.build_option }} ${{ matrix.arg }}
+
+      - name: Build using ESP-IDF docker
+        if: inputs.toolchain == 'esp-idf'
+        run: |
+          docker run --rm -v $PWD:/project -w /project espressif/idf:${{ inputs.toolchain_url }} python3 tools/build.py ${{ matrix.arg }}
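Inside the reusable workflow, fromJSON(inputs.build-args) fans the JSON array out into one matrix job per element, and each element lands on the build.py command line. A local sketch of that expansion, with an example board taken from the make-os job above:

# Sketch: what the build_util.yml matrix does with build-args, reproduced locally.
# Each JSON array element becomes one matrix job whose 'arg' is appended to the
# build.py command line (board entries use the "-b<name>" form). Illustrative only.
import json
import shlex
import subprocess

build_args = '["-bstm32f411disco"]'   # e.g. the make-os job input
build_system = "make"

for arg in json.loads(build_args):    # one matrix job per element
    cmd = f"python tools/build.py -s {build_system} {arg}"
    print(cmd)
    # subprocess.run(shlex.split(cmd), check=True)  # uncomment inside a TinyUSB checkout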

.github/workflows/build_win_mac.yml (deleted, 56 lines)
@@ -1,56 +0,0 @@
-name: Build Windows/MacOS
-
-on:
-  workflow_dispatch:
-  push:
-    paths:
-      - 'src/**'
-      - 'examples/**'
-      - 'lib/**'
-      - 'hw/**'
-      - '.github/workflows/build_win_mac.yml'
-  pull_request:
-    branches: [ master ]
-    paths:
-      - 'src/**'
-      - 'examples/**'
-      - 'lib/**'
-      - 'hw/**'
-      - '.github/workflows/build_win_mac.yml'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
-  cancel-in-progress: true
-
-jobs:
-  # ---------------------------------------
-  # Build ARM family
-  # ---------------------------------------
-  build-arm:
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [windows-latest, macos-latest]
-    runs-on: ${{ matrix.os }}
-
-    steps:
-      - name: Setup Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: '3.x'
-
-      - name: Install ARM GCC
-        uses: carlosperate/arm-none-eabi-gcc-action@v1
-        with:
-          release: '10.3-2021.10'
-
-      - name: Checkout TinyUSB
-        uses: actions/checkout@v4
-
-      - name: Get Dependencies
-        run: |
-          pip install click
-          python3 tools/get_deps.py stm32f4
-
-      - name: Build
-        run: python3 tools/build.py -s make stm32f2

.github/workflows/ci_set_matrix.py (2 changes)
@@ -16,7 +16,7 @@ family_list = {
    "ch32v307 fomu gd32vf103": ["riscv-gcc"],
    "imxrt": ["arm-gcc", "arm-clang"],
    "kinetis_k kinetis_kl kinetis_k32l2": ["arm-gcc", "arm-clang"],
-    "lpc11 lpc13 lpc15": ["arm-gcc"],
+    "lpc11 lpc13 lpc15": ["arm-gcc", "arm-clang"],
    "lpc17 lpc18 lpc40 lpc43": ["arm-gcc", "arm-clang"],
    "lpc51 lpc54 lpc55": ["arm-gcc", "arm-clang"],
    "mcx": ["arm-gcc"],
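ci_set_matrix.py maps space-separated family groups to the toolchains that should build them; build_cmake.yml above indexes the resulting JSON by toolchain. The script's actual output format is not part of this diff, so the inversion below is only an illustrative guess at how such a table could be turned into the toolchain-keyed JSON the workflow consumes:

# Sketch: inverting a family->toolchains table like the one above into a
# toolchain-keyed structure that a workflow could index with matrix.toolchain.
# ci_set_matrix.py's real output format is not shown in this diff; illustrative only.
import json

family_list = {
    "imxrt": ["arm-gcc", "arm-clang"],
    "lpc11 lpc13 lpc15": ["arm-gcc", "arm-clang"],
    "mcx": ["arm-gcc"],
}

per_toolchain = {}
for families, toolchains in family_list.items():
    for tc in toolchains:
        per_toolchain.setdefault(tc, {"family": []})["family"].append(families)

print(json.dumps(per_toolchain, indent=2))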

.github/workflows/hil_test.yml (121 changes)
@@ -24,15 +24,18 @@ on:
       - '.github/actions/**'
       - '.github/workflows/hil_test.yml'
 concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
+  # ---------------------------------------
+  # Build Non Espressif
+  # ---------------------------------------
   build:
     if: github.repository_owner == 'hathach'
     runs-on: ubuntu-latest
     outputs:
-      BOARD_LIST: ${{ steps.parse_hil_json.outputs.BOARD_LIST }}
+      BOARDS_LIST: ${{ steps.parse_hil_json.outputs.BOARDS_LIST }}
     steps:
       - name: Checkout TinyUSB
         uses: actions/checkout@v4
@@ -42,38 +45,29 @@
         with:
           python-version: '3.x'

-      - name: Install ARM GCC
-        uses: carlosperate/arm-none-eabi-gcc-action@v1
-        with:
-          release: '12.3.Rel1'
-
       - name: Parse HIL json
         id: parse_hil_json
         run: |
           sudo apt install -y jq
-          BOARD_LIST=$(jq -r '.boards[] | "-b " + .name' test/hil/pi4.json | tr '\n' ' ')
-          echo "BOARD_LIST=$BOARD_LIST"
-          echo >> $GITHUB_ENV "BOARD_LIST=$BOARD_LIST"
-          echo >> $GITHUB_OUTPUT "BOARD_LIST=$BOARD_LIST"
-
-      - name: Checkout pico-sdk for rp2040
-        uses: actions/checkout@v4
+          # Non-Espresif boards
+          BOARDS_LIST=$(jq -r '.boards[] | select(.flasher != "esptool") | "-b " + .name' test/hil/pi4.json | tr '\n' ' ')
+          echo "BOARDS_LIST=$BOARDS_LIST"
+          echo "BOARDS_LIST=$BOARDS_LIST" >> $GITHUB_ENV
+          echo "BOARDS_LIST=$BOARDS_LIST" >> $GITHUB_OUTPUT
+
+      - name: Setup ARM Toolchain
+        uses: ./.github/actions/setup_toolchain
         with:
-          repository: raspberrypi/pico-sdk
-          ref: develop
-          path: pico-sdk
+          toolchain: 'arm-gcc'

       - name: Get Dependencies
-        run: |
-          pip install click
-          sudo apt install -y ninja-build
-          python3 tools/get_deps.py $BOARD_LIST
+        uses: ./.github/actions/get_deps
+        with:
+          arg: ${{ env.BOARDS_LIST }}

       - name: Build
-        run: |
-          python tools/build.py $BOARD_LIST
-        env:
-          PICO_SDK_PATH: ${{ github.workspace }}/pico-sdk
+        run: python tools/build.py $BOARDS_LIST

       - name: Upload Artifacts for Hardware Testing
         uses: actions/upload-artifact@v4
@@ -83,16 +77,71 @@
           cmake-build/cmake-build-*/*/*/*.elf
           cmake-build/cmake-build-*/*/*/*.bin

+  # ---------------------------------------
+  # Build Espressif
+  # ---------------------------------------
+  build-esp:
+    runs-on: ubuntu-latest
+    outputs:
+      BOARDS_LIST: ${{ steps.parse_hil_json.outputs.BOARDS_LIST }}
+    steps:
+      - name: Checkout TinyUSB
+        uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: '3.x'
+
+      - name: Parse HIL json
+        id: parse_hil_json
+        run: |
+          sudo apt install -y jq
+          # Espressif boards
+          BOARDS_LIST=$(jq -r '.boards[] | select(.flasher == "esptool") | "-b " + .name' test/hil/pi4.json | tr '\n' ' ')
+          echo "BOARDS_LIST=$BOARDS_LIST"
+          echo "BOARDS_LIST=$BOARDS_LIST" >> $GITHUB_ENV
+          echo "BOARDS_LIST=$BOARDS_LIST" >> $GITHUB_OUTPUT
+
+      - name: Setup ESP-IDF
+        if: env.BOARDS_LIST != ''
+        uses: ./.github/actions/setup_toolchain
+        with:
+          toolchain: 'esp-idf'
+          toolchain_url: 'v5.1.1'
+
+      - name: Get Dependencies
+        uses: ./.github/actions/get_deps
+        with:
+          arg: ${{ env.BOARDS_LIST }}
+
+      - name: Build Espressif
+        if: env.BOARDS_LIST != ''
+        run: docker run --rm -v $PWD:/project -w /project espressif/idf:v5.1.1 python3 tools/build.py $BOARDS_LIST
+
+      - name: Upload Artifacts for Hardware Testing
+        uses: actions/upload-artifact@v4
+        with:
+          name: hil_pi4_esp
+          path: |
+            cmake-build/cmake-build-*/*/*/*.bin
+            cmake-build/cmake-build-*/*/*/bootloader/bootloader.bin
+            cmake-build/cmake-build-*/*/*/partition_table/partition-table.bin
+            cmake-build/cmake-build-*/*/*/config.env
+            cmake-build/cmake-build-*/*/*/flash_args
+
   # ---------------------------------------
   # Hardware in the loop (HIL)
-  # Current self-hosted instance is running on an RPI4. For attached hardware checkout hil_pi4.json
+  # Current self-hosted instance is running on an RPI4. For attached hardware checkout test/hil/pi4.json
   # ---------------------------------------
   hil-pi4:
     if: github.repository_owner == 'hathach'
-    needs: build
-    runs-on: [self-hosted, rp2040, nrf52840, hardware-in-the-loop]
+    needs:
+      - build
+      - build-esp
+    runs-on: [self-hosted, rp2040, nrf52840, esp32s3, hardware-in-the-loop]
     env:
-      BOARD_LIST: ${{ needs.build.outputs.BOARD_LIST }}
+      BOARDS_LIST: "${{ needs.build.outputs.BOARDS_LIST }} ${{ needs.build-esp.outputs.BOARDS_LIST }}"
     steps:
       - name: Clean workspace
         run: |
@@ -103,8 +152,7 @@
       # USB bus on rpi4 is not stable, reset it before testing
       - name: Reset USB bus
         run: |
           lsusb
-          lsusb -t
+          # lsusb -t
           # reset VIA Labs 2.0 hub
           sudo usbreset 001/002

@@ -119,7 +167,16 @@
           name: hil_pi4
           path: cmake-build

+      - name: Download Artifacts
+        uses: actions/download-artifact@v4
+        with:
+          name: hil_pi4_esp
+          path: cmake-build
+
       - name: Test on actual hardware
         run: |
-          echo "BOARD_LIST=$BOARD_LIST"
-          python3 test/hil/hil_test.py $BOARD_LIST pi4.json
+          echo "BOARDS_LIST=$BOARDS_LIST"
+          echo "::group::{cmake-build contents}"
+          tree cmake-build
+          echo "::endgroup::"
+          python3 test/hil/hil_test.py $BOARDS_LIST pi4.json
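Both Parse HIL json steps split test/hil/pi4.json by the flasher field: boards flashed with esptool go to the ESP-IDF build, everything else to the ARM build. The same split expressed in Python:

# Sketch: Python equivalent of the two jq filters in the "Parse HIL json" steps,
# splitting pi4.json boards by flasher and formatting them as "-b <name>" arguments.
import json

with open("test/hil/pi4.json") as f:
    boards = json.load(f)["boards"]

non_esp = " ".join(f"-b {b['name']}" for b in boards if b.get("flasher") != "esptool")
esp = " ".join(f"-b {b['name']}" for b in boards if b.get("flasher") == "esptool")

print("BOARDS_LIST (non-Espressif):", non_esp)
print("BOARDS_LIST (Espressif):", esp)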

.github/workflows/pre-commit.yml (3 changes)
@@ -7,7 +7,7 @@ on:
     branches: [ master ]

 concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

 jobs:
@@ -38,7 +38,6 @@ jobs:

      - name: Build Fuzzer
        run: |
-          pip install click
          export CC=clang
          export CXX=clang++
          fuzz_harness=$(ls -d test/fuzz/device/*/)
@@ -9,6 +9,8 @@ mcu:STM32F0
 mcu:KINETIS_KL
 family:broadcom_64bit
 family:broadcom_32bit
-family:espressif
 board:curiosity_nano
 board:frdm_kl25z
+family:espressif
+# lpc55 has weird error 'ncm_interface' causes a section type conflict with 'ntb_parameters'
+family:lpc55
@@ -12,3 +12,5 @@ board:lpcxpresso11u68
 board:stm32f303disco
 board:stm32l412nucleo
 board:ek_tm4c123gxl
+board:uno_r4
+board:ra4m1_ek
@@ -8,5 +8,4 @@ mcu:MIMXRT10XX
 mcu:MIMXRT11XX
 mcu:MSP432E4
 mcu:RX65X
-mcu:RAXXX
 mcu:MAX3421
@@ -22,8 +22,8 @@ LDFLAGS_GCC += --specs=nosys.specs --specs=nano.specs

 SRC_C += \
   src/portable/nxp/lpc17_40/dcd_lpc17_40.c \
-  src/portable/ohci/ohci.c \
   src/portable/nxp/lpc17_40/hcd_lpc17_40.c \
+  src/portable/ohci/ohci.c \
   $(MCU_DIR)/../gcc/cr_startup_lpc175x_6x.c \
   $(MCU_DIR)/src/chip_17xx_40xx.c \
   $(MCU_DIR)/src/clock_17xx_40xx.c \
@@ -20,6 +20,8 @@ LDFLAGS_GCC += --specs=nosys.specs --specs=nano.specs
 # All source paths should be relative to the top level.
 SRC_C += \
   src/portable/nxp/lpc17_40/dcd_lpc17_40.c \
+  src/portable/nxp/lpc17_40/hcd_lpc17_40.c \
+  src/portable/ohci/ohci.c \
   $(MCU_DIR)/../gcc/cr_startup_lpc40xx.c \
   $(MCU_DIR)/src/chip_17xx_40xx.c \
   $(MCU_DIR)/src/clock_17xx_40xx.c \
@@ -112,7 +112,7 @@ typedef struct {
   bool notification_xmit_is_running; // notification is currently transmitted
 } ncm_interface_t;

-CFG_TUSB_MEM_SECTION CFG_TUSB_MEM_ALIGN tu_static ncm_interface_t ncm_interface;
+CFG_TUD_MEM_SECTION CFG_TUD_MEM_ALIGN tu_static ncm_interface_t ncm_interface;

 /**
  * This is the NTB parameter structure
@@ -120,7 +120,7 @@ CFG_TUSB_MEM_SECTION CFG_TUSB_MEM_ALIGN tu_static ncm_interface_t ncm_interface;
  * \attention
  *     We are lucky, that byte order is correct
  */
-CFG_TUSB_MEM_SECTION CFG_TUSB_MEM_ALIGN tu_static const ntb_parameters_t ntb_parameters = {
+CFG_TUD_MEM_SECTION CFG_TUD_MEM_ALIGN tu_static const ntb_parameters_t ntb_parameters = {
     .wLength = sizeof(ntb_parameters_t),
     .bmNtbFormatsSupported = 0x01,// 16-bit NTB supported
     .dwNtbInMaxSize = CFG_TUD_NCM_IN_NTB_MAX_SIZE,
@@ -285,7 +285,7 @@ static xmit_ntb_t *xmit_get_next_ready_ntb(void) {
  *   This must be called from netd_xfer_cb() so that ep_in is ready
  */
 static bool xmit_insert_required_zlp(uint8_t rhport, uint32_t xferred_bytes) {
-  TU_LOG_DRV("xmit_insert_required_zlp(%d,%d)\n", rhport, xferred_bytes);
+  TU_LOG_DRV("xmit_insert_required_zlp(%d,%ld)\n", rhport, xferred_bytes);

   if (xferred_bytes == 0 || xferred_bytes % CFG_TUD_NET_ENDPOINT_SIZE != 0) {
     return false;
@@ -521,11 +521,11 @@ static bool recv_validate_datagram(const recv_ntb_t *ntb, uint32_t len) {
     return false;
   }
   if (len < sizeof(nth16_t) + sizeof(ndp16_t) + 2 * sizeof(ndp16_datagram_t)) {
-    TU_LOG_DRV("(EE) ill min len: %d\n", len);
+    TU_LOG_DRV("(EE) ill min len: %lu\n", len);
     return false;
   }
   if (nth16->wBlockLength > len) {
-    TU_LOG_DRV("(EE) ill block length: %d > %d\n", nth16->wBlockLength, len);
+    TU_LOG_DRV("(EE) ill block length: %d > %lu\n", nth16->wBlockLength, len);
     return false;
   }
   if (nth16->wBlockLength > CFG_TUD_NCM_OUT_NTB_MAX_SIZE) {
@@ -533,7 +533,7 @@ static bool recv_validate_datagram(const recv_ntb_t *ntb, uint32_t len) {
     return false;
   }
   if (nth16->wNdpIndex < sizeof(nth16) || nth16->wNdpIndex > len - (sizeof(ndp16_t) + 2 * sizeof(ndp16_datagram_t))) {
-    TU_LOG_DRV("(EE) ill position of first ndp: %d (%d)\n", nth16->wNdpIndex, len);
+    TU_LOG_DRV("(EE) ill position of first ndp: %d (%lu)\n", nth16->wNdpIndex, len);
     return false;
   }

@@ -567,11 +567,11 @@ static bool recv_validate_datagram(const recv_ntb_t *ntb, uint32_t len) {
   while (ndp16_datagram[ndx].wDatagramIndex != 0 && ndp16_datagram[ndx].wDatagramLength != 0) {
     TU_LOG_DRV("  << %d %d\n", ndp16_datagram[ndx].wDatagramIndex, ndp16_datagram[ndx].wDatagramLength);
     if (ndp16_datagram[ndx].wDatagramIndex > len) {
-      TU_LOG_DRV("(EE) ill start of datagram[%d]: %d (%d)\n", ndx, ndp16_datagram[ndx].wDatagramIndex, len);
+      TU_LOG_DRV("(EE) ill start of datagram[%d]: %d (%lu)\n", ndx, ndp16_datagram[ndx].wDatagramIndex, len);
       return false;
     }
     if (ndp16_datagram[ndx].wDatagramIndex + ndp16_datagram[ndx].wDatagramLength > len) {
-      TU_LOG_DRV("(EE) ill end of datagram[%d]: %d (%d)\n", ndx, ndp16_datagram[ndx].wDatagramIndex + ndp16_datagram[ndx].wDatagramLength, len);
+      TU_LOG_DRV("(EE) ill end of datagram[%d]: %d (%lu)\n", ndx, ndp16_datagram[ndx].wDatagramIndex + ndp16_datagram[ndx].wDatagramLength, len);
       return false;
     }
     ++ndx;
@@ -25,10 +25,10 @@
 # ACTION=="add", SUBSYSTEM=="tty", SUBSYSTEMS=="usb", MODE="0666", PROGRAM="/bin/sh -c 'echo $$ID_SERIAL_SHORT | rev | cut -c -8 | rev'", SYMLINK+="ttyUSB_%c.%s{bInterfaceNumber}"
 # ACTION=="add", SUBSYSTEM=="block", SUBSYSTEMS=="usb", ENV{ID_FS_USAGE}=="filesystem", MODE="0666", PROGRAM="/bin/sh -c 'echo $$ID_SERIAL_SHORT | rev | cut -c -8 | rev'", RUN{program}+="/usr/bin/systemd-mount --no-block --automount=yes --collect $devnode /media/blkUSB_%c.%s{bInterfaceNumber}"

+import argparse
 import os
 import sys
 import time
-import click
 import serial
 import subprocess
 import json
@@ -318,13 +318,18 @@ def test_hid_composite_freertos(id):
 # -------------------------------------------------------------
 # Main
 # -------------------------------------------------------------
-@click.command()
-@click.argument('config_file')
-@click.option('-b', '--board', multiple=True, default=None, help='Boards to test, all if not specified')
-def main(config_file, board):
+def main():
     """
     Hardware test on specified boards
     """
+    parser = argparse.ArgumentParser()
+    parser.add_argument('config_file', help='Configuration JSON file')
+    parser.add_argument('-b', '--board', action='append', default=[], help='Boards to test, all if not specified')
+    args = parser.parse_args()
+
+    config_file = args.config_file
+    boards = args.board
+
     config_file = os.path.join(os.path.dirname(__file__), config_file)
     with open(config_file) as f:
         config = json.load(f)
@@ -334,10 +339,10 @@ def main(config_file, board):
         'cdc_dual_ports', 'cdc_msc', 'dfu', 'dfu_runtime', 'hid_boot_interface',
     ]

-    if len(board) == 0:
+    if len(boards) == 0:
         config_boards = config['boards']
     else:
-        config_boards = [e for e in config['boards'] if e['name'] in board]
+        config_boards = [e for e in config['boards'] if e['name'] in boards]

     for item in config_boards:
         name = item['name']
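The click-to-argparse change in hil_test.py keeps the CLI shape: a positional config file plus a repeatable -b/--board flag, with action='append' standing in for click's multiple=True. A quick self-check of the new parser:

# Sketch: the argparse pattern adopted above; repeated -b flags accumulate into a list,
# matching the click multiple=True behaviour that was removed.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('config_file', help='Configuration JSON file')
parser.add_argument('-b', '--board', action='append', default=[], help='Boards to test, all if not specified')

args = parser.parse_args(['pi4.json', '-b', 'raspberry_pi_pico', '-b', 'espressif_s3_devkitm'])
assert args.config_file == 'pi4.json'
assert args.board == ['raspberry_pi_pico', 'espressif_s3_devkitm']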
@@ -22,6 +22,16 @@
       "flasher_product": "ItsyBitsy M4 Express",
       "flasher_reset_pin": "2",
       "flasher_args": "--offset 0x4000"
+    },
+    {
+      "name": "espressif_s3_devkitm",
+      "uid": "84F703C084E4",
+      "tests": [
+        "cdc_msc_freertos", "hid_composite_freertos"
+      ],
+      "flasher": "esptool",
+      "flasher_sn": "3ea619acd1cdeb11a0a0b806e93fd3f1",
+      "flasher_args": "-b 1500000"
     }
   ]
 }
@@ -1,14 +0,0 @@
-{
-  "boards": [
-    {
-      "name": "espressif_s3_devkitm",
-      "uid": "84F703C084E4",
-      "tests": [
-        "cdc_msc_freertos", "hid_composite_freertos"
-      ],
-      "flasher": "esptool",
-      "flasher_sn": "3ea619acd1cdeb11a0a0b806e93fd3f1",
-      "flasher_args": "-b 1500000"
-    }
-  ]
-}
@@ -1,8 +1,8 @@
+import argparse
 import os
 import sys
 import time
 import subprocess
-import click
 from pathlib import Path
 from multiprocessing import Pool

@@ -125,13 +125,20 @@ def build_family(family, toolchain, build_system):
     return ret


-@click.command()
-@click.argument('families', nargs=-1, required=False)
-@click.option('-b', '--board', multiple=True, default=None, help='Boards to build')
-@click.option('-t', '--toolchain', default='gcc', help='Toolchain to use, default is gcc')
-@click.option('-s', '--build-system', default='cmake', help='Build system to use, default is cmake')
-def main(families, board, toolchain, build_system):
-    if len(families) == 0 and len(board) == 0:
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('families', nargs='*', default=[], help='Families to build')
+    parser.add_argument('-b', '--board', action='append', default=[], help='Boards to build')
+    parser.add_argument('-t', '--toolchain', default='gcc', help='Toolchain to use, default is gcc')
+    parser.add_argument('-s', '--build-system', default='cmake', help='Build system to use, default is cmake')
+    args = parser.parse_args()
+
+    families = args.families
+    boards = args.board
+    toolchain = args.toolchain
+    build_system = args.build_system
+
+    if len(families) == 0 and len(boards) == 0:
         print("Please specify families or board to build")
         return 1

@@ -159,12 +166,23 @@ def main(families, board, toolchain, build_system):
         total_result[2] += fret[2]

     # build board (only cmake)
-    if board is not None:
-        for b in board:
-            r = build_board_cmake(b, toolchain)
-            total_result[0] += r[0]
-            total_result[1] += r[1]
-            total_result[2] += r[2]
+    if boards is not None:
+        for b in boards:
+            if build_system == 'cmake':
+                r = build_board_cmake(b, toolchain)
+                total_result[0] += r[0]
+                total_result[1] += r[1]
+                total_result[2] += r[2]
+            elif build_system == 'make':
+                all_examples = get_examples(find_family(b))
+                with Pool(processes=os.cpu_count()) as pool:
+                    pool_args = list((map(lambda e, bb=b, o=f"TOOLCHAIN={toolchain}": [e, bb, o], all_examples)))
+                    r = pool.starmap(build_utils.build_example, pool_args)
+                    # sum all element of same index (column sum)
+                    rsum = list(map(sum, list(zip(*r))))
+                    total_result[0] += rsum[0]
+                    total_result[1] += rsum[1]
+                    total_result[2] += rsum[2]

     total_time = time.monotonic() - total_time
     print(build_separator)
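Two idioms in the new make-per-board branch of build.py are worth spelling out: summing the per-example (ok, failed, skipped) tuples column-wise with zip, and freezing the current board and options into each pool argument via lambda default arguments. A minimal demonstration:

# Sketch: the two small idioms used in the new per-board make branch above.

# 1) column-wise sum of per-example result tuples (ok, failed, skipped),
#    i.e. rsum = list(map(sum, zip(*r))) after pool.starmap returns
results = [(1, 0, 0), (0, 1, 0), (1, 0, 2)]
rsum = list(map(sum, zip(*results)))
assert rsum == [2, 1, 2]

# 2) binding the current board/option into each worker argument list via
#    lambda default arguments, as done when building pool_args
examples = ['device/cdc_msc', 'device/hid_composite']
board = 'stm32f411disco'
pool_args = list(map(lambda e, bb=board, o='TOOLCHAIN=gcc': [e, bb, o], examples))
assert pool_args == [['device/cdc_msc', 'stm32f411disco', 'TOOLCHAIN=gcc'],
                     ['device/hid_composite', 'stm32f411disco', 'TOOLCHAIN=gcc']]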
@@ -1,106 +0,0 @@
-import os
-import glob
-import sys
-import subprocess
-import time
-
-import build_utils
-
-SUCCEEDED = "\033[32msucceeded\033[0m"
-FAILED = "\033[31mfailed\033[0m"
-SKIPPED = "\033[33mskipped\033[0m"
-
-success_count = 0
-fail_count = 0
-skip_count = 0
-exit_status = 0
-
-total_time = time.monotonic()
-
-build_format = '| {:30} | {:30} | {:18} | {:7} | {:6} | {:6} |'
-build_separator = '-' * 107
-
-def filter_with_input(mylist):
-    if len(sys.argv) > 1:
-        input_args = list(set(mylist).intersection(sys.argv))
-        if len(input_args) > 0:
-            mylist[:] = input_args
-
-
-# Build all examples if not specified
-all_examples = [entry.replace('examples/', '') for entry in glob.glob("examples/*/*_freertos")]
-filter_with_input(all_examples)
-all_examples.append('device/board_test')
-all_examples.sort()
-
-# Build all boards if not specified
-all_boards = []
-for entry in os.scandir("hw/bsp/espressif/boards"):
-    if entry.is_dir():
-        all_boards.append(entry.name)
-filter_with_input(all_boards)
-all_boards.sort()
-
-def build_board(example, board):
-    global success_count, fail_count, skip_count, exit_status
-    start_time = time.monotonic()
-
-    # Check if board is skipped
-    build_dir = f"cmake-build/cmake-build-{board}/{example}"
-
-    # Generate and build
-    r = subprocess.run(f"cmake examples/{example} -B {build_dir} -G \"Ninja\" -DBOARD={board} -DMAX3421_HOST=1",
-                       shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    if r.returncode == 0:
-        r = subprocess.run(f"cmake --build {build_dir}", shell=True, stdout=subprocess.PIPE,
-                           stderr=subprocess.STDOUT)
-    build_duration = time.monotonic() - start_time
-    flash_size = "-"
-    sram_size = "-"
-
-    if r.returncode == 0:
-        success = SUCCEEDED
-        success_count += 1
-        #(flash_size, sram_size) = build_size(example, board)
-    else:
-        exit_status = r.returncode
-        success = FAILED
-        fail_count += 1
-
-    title = build_format.format(example, board, success, "{:.2f}s".format(build_duration), flash_size, sram_size)
-    if os.getenv('CI'):
-        # always print build output if in CI
-        print(f"::group::{title}")
-        print(r.stdout.decode("utf-8"))
-        print(f"::endgroup::")
-    else:
-        # print build output if failed
-        print(title)
-        if r.returncode != 0:
-            print(r.stdout.decode("utf-8"))
-
-
-def build_size(example, board):
-    #elf_file = 'examples/device/{}/_build/{}/{}-firmware.elf'.format(example, board, board)
-    elf_file = 'examples/device/{}/_build/{}/*.elf'.format(example, board)
-    size_output = subprocess.run('size {}'.format(elf_file), shell=True, stdout=subprocess.PIPE).stdout.decode("utf-8")
-    size_list = size_output.split('\n')[1].split('\t')
-    flash_size = int(size_list[0])
-    sram_size = int(size_list[1]) + int(size_list[2])
-    return (flash_size, sram_size)
-
-
-print(build_separator)
-print(build_format.format('Example', 'Board', '\033[39mResult\033[0m', 'Time', 'Flash', 'SRAM'))
-print(build_separator)
-
-for example in all_examples:
-    for board in all_boards:
-        build_board(example, board)
-
-total_time = time.monotonic() - total_time
-print(build_separator)
-print("Build Summary: {} {}, {} {}, {} {} and took {:.2f}s".format(success_count, SUCCEEDED, fail_count, FAILED, skip_count, SKIPPED, total_time))
-print(build_separator)
-
-sys.exit(exit_status)
@@ -245,11 +245,10 @@ def main():
     args = parser.parse_args()

     families = args.families
-    board = args.board
+    boards = args.board

-    if len(families) == 0 and len(board) == 0:
-        print("Please specify family or board to fetch")
-        return
+    if len(families) == 0 and len(boards) == 0:
+        print("Warning: family and board are not specified, only fetching mandatory dependencies.")

     status = 0
     deps = list(deps_mandatory.keys())
@@ -258,8 +257,8 @@ def main():
         deps += deps_optional.keys()
     else:
         families = list(families)
-        if board is not None:
-            for b in board:
+        if boards is not None:
+            for b in boards:
                 f = find_family(b)
                 if f is not None:
                     families.append(f)
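With this change, get_deps.py no longer aborts when neither a family nor a board is given; it warns and fetches only the mandatory dependencies. A hedged usage sketch from a wrapper script (the board and family names are examples):

# Sketch: exercising the new get_deps.py behaviour from another script.
# Run inside a TinyUSB checkout; fetched paths depend on the dependency tables in get_deps.py.
import subprocess

# no family/board: warns and fetches only the mandatory dependencies
subprocess.run(["python3", "tools/get_deps.py"], check=True)

# family and/or board arguments pull in their optional dependencies as well
subprocess.run(["python3", "tools/get_deps.py", "rp2040", "-b", "stm32f411disco"], check=True)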