Added new scripts to CI results

- Added to GitHub statuses (61 results)

- Reworked generated release table to include these (16 results, only thumb)

These also required a surprisingly large number of other changes:

- Bumped CI Ubuntu version 18.04 -> 20.04, 22.04 is already on the
  horizon but not usable in GitHub yet

- Manually upgraded to GCC v10, this is required for the -fcallgraph-info
  flag that scripts/stack.py uses.

- Increased paginated status queries to 100 per-page. If we have more
  statuses than this the status diffs may get much more complicated...

- Forced whitespace in generated release table to always be nbsp. GitHub
  tables get scrunched rather ugly without this, preferring margins to
  readable tables.

- Added limited support for "∞" results, since this is returned by
  ./scripts/stack.py for recursive functions.

As a side-note, this increases the number of statuses reported
per-commit from 6 to 61, so hopefully that doesn't cause any problems...
This commit is contained in:
Christopher Haster
2022-03-11 17:34:51 -06:00
parent 7ea2b515aa
commit 9d54603ce2
4 changed files with 143 additions and 148 deletions

View File

@@ -6,7 +6,7 @@ on:
jobs: jobs:
post-release: post-release:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
steps: steps:
# trigger post-release in dependency repo, this indirection allows the # trigger post-release in dependency repo, this indirection allows the
# dependency repo to be updated often without affecting this repo. At # dependency repo to be updated often without affecting this repo. At

View File

@@ -7,7 +7,7 @@ on:
jobs: jobs:
release: release:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
# need to manually check for a couple things # need to manually check for a couple things
# - tests passed? # - tests passed?
@@ -73,89 +73,70 @@ jobs:
# previous results to compare against? # previous results to compare against?
[ -n "$LFS_PREV_VERSION" ] && curl -sS \ [ -n "$LFS_PREV_VERSION" ] && curl -sS \
"$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/` "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/`
`status/$LFS_PREV_VERSION" \ `status/$LFS_PREV_VERSION?per_page=100" \
| jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]' \ | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]' \
>> prev-results.json \ >> prev-results.json \
|| true || true
# unfortunately these each have their own format # build table for GitHub
[ -e results/code-thumb.csv ] && ( \ echo "<table>" >> results.txt
export PREV="$(jq -re ' echo "<thead>" >> results.txt
select(.context == "results / code").description echo "<tr>" >> results.txt
| capture("Code size is (?<result>[0-9]+)").result' \ echo "<th align=left>Configuration</th>" >> results.txt
prev-results.json || echo 0)" for r in Code Stack Structs Coverage
./scripts/code.py -u results/code-thumb.csv --summary | awk ' do
NR==2 {printf "Code size,%d B",$2} echo "<th align=right>$r</th>" >> results.txt
NR==2 && ENVIRON["PREV"]+0 != 0 { done
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]} echo "</tr>" >> results.txt
NR==2 {printf "\n"}' \ echo "</thead>" >> results.txt
>> results.csv)
[ -e results/code-thumb-readonly.csv ] && ( \ echo "<tbody>" >> results.txt
export PREV="$(jq -re ' for c in "" readonly threadsafe migrate error-asserts
select(.context == "results / code (readonly)").description do
| capture("Code size is (?<result>[0-9]+)").result' \ echo "<tr>" >> results.txt
prev-results.json || echo 0)" c_or_default=${c:-default}
./scripts/code.py -u results/code-thumb-readonly.csv --summary | awk ' echo "<td align=left>${c_or_default^}</td>" >> results.txt
NR==2 {printf "Code size<br/>(readonly),%d B",$2} for r in code stack structs
NR==2 && ENVIRON["PREV"]+0 != 0 { do
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]} # per-config results
NR==2 {printf "\n"}' \ echo "<td align=right>" >> results.txt
>> results.csv) [ -e results/thumb${c:+-$c}.csv ] && ( \
[ -e results/code-thumb-threadsafe.csv ] && ( \ export PREV="$(jq -re '
export PREV="$(jq -re ' select(.context == "'"results (thumb${c:+, $c}) / $r"'").description
select(.context == "results / code (threadsafe)").description | capture("(?<result>[0-9∞]+)").result' \
| capture("Code size is (?<result>[0-9]+)").result' \ prev-results.json || echo 0)"
prev-results.json || echo 0)" ./scripts/summary.py results/thumb${c:+-$c}.csv -f $r -Y | awk '
./scripts/code.py -u results/code-thumb-threadsafe.csv --summary | awk ' NR==2 {printf "%s B",$2}
NR==2 {printf "Code size<br/>(threadsafe),%d B",$2} NR==2 && ENVIRON["PREV"]+0 != 0 {
NR==2 && ENVIRON["PREV"]+0 != 0 { printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]} NR==2 {printf "\n"}' \
NR==2 {printf "\n"}' \ | sed -e 's/ /\&nbsp;/g' \
>> results.csv) >> results.txt)
[ -e results/code-thumb-migrate.csv ] && ( \ echo "</td>" >> results.txt
export PREV="$(jq -re ' done
select(.context == "results / code (migrate)").description # coverage results
| capture("Code size is (?<result>[0-9]+)").result' \ if [ -z $c ]
prev-results.json || echo 0)" then
./scripts/code.py -u results/code-thumb-migrate.csv --summary | awk ' echo "<td rowspan=0 align=right>" >> results.txt
NR==2 {printf "Code size<br/>(migrate),%d B",$2} [ -e results/coverage.csv ] && ( \
NR==2 && ENVIRON["PREV"]+0 != 0 { export PREV="$(jq -re '
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]} select(.context == "results / coverage").description
NR==2 {printf "\n"}' \ | capture("(?<result>[0-9\\.]+)").result' \
>> results.csv) prev-results.json || echo 0)"
[ -e results/code-thumb-error-asserts.csv ] && ( \ ./scripts/coverage.py -u results/coverage.csv -Y | awk -F '[ /%]+' '
export PREV="$(jq -re ' NR==2 {printf "%.1f%% of %d lines",$4,$3}
select(.context == "results / code (error-asserts)").description NR==2 && ENVIRON["PREV"]+0 != 0 {
| capture("Code size is (?<result>[0-9]+)").result' \ printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
prev-results.json || echo 0)" NR==2 {printf "\n"}' \
./scripts/code.py -u results/code-thumb-error-asserts.csv --summary | awk ' | sed -e 's/ /\&nbsp;/g' \
NR==2 {printf "Code size<br/>(error-asserts),%d B",$2} >> results.txt)
NR==2 && ENVIRON["PREV"]+0 != 0 { echo "</td>" >> results.txt
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]} fi
NR==2 {printf "\n"}' \ echo "</tr>" >> results.txt
>> results.csv) done
[ -e results/coverage.csv ] && ( \ echo "</tbody>" >> results.txt
export PREV="$(jq -re ' echo "</table>" >> results.txt
select(.context == "results / coverage").description
| capture("Coverage is (?<result>[0-9\\.]+)").result' \
prev-results.json || echo 0)"
./scripts/coverage.py -u results/coverage.csv --summary | awk -F '[ /%]+' '
NR==2 {printf "Coverage,%.1f%% of %d lines",$4,$3}
NR==2 && ENVIRON["PREV"]+0 != 0 {
printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
NR==2 {printf "\n"}' \
>> results.csv)
# transpose to GitHub table
[ -e results.csv ] || exit 0
awk -F ',' '
{label[NR]=$1; value[NR]=$2}
END {
for (r=1; r<=NR; r++) {printf "| %s ",label[r]}; printf "|\n";
for (r=1; r<=NR; r++) {printf "|:--"}; printf "|\n";
for (r=1; r<=NR; r++) {printf "| %s ",value[r]}; printf "|\n"}' \
results.csv > results.txt
echo "RESULTS:"
cat results.txt cat results.txt
# find changes from history # find changes from history

View File

@@ -6,7 +6,7 @@ on:
jobs: jobs:
status: status:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
steps: steps:
# custom statuses? # custom statuses?
- uses: dawidd6/action-download-artifact@v2 - uses: dawidd6/action-download-artifact@v2

View File

@@ -8,7 +8,7 @@ env:
jobs: jobs:
# run tests # run tests
test: test:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -18,11 +18,11 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: install - name: install
run: | run: |
# need toml, also pip3 isn't installed by default? # need a few additional tools
sudo apt-get update -qq sudo apt-get update -qq
sudo apt-get install -qq python3 python3-pip lcov sudo apt-get install -qq python3 python3-pip lcov
sudo pip3 install toml sudo pip3 install toml
gcc --version python3 --version
# setup a ram-backed disk to speed up reentrant tests # setup a ram-backed disk to speed up reentrant tests
mkdir disks mkdir disks
@@ -36,41 +36,49 @@ jobs:
echo "TESTFLAGS=$TESTFLAGS" >> $GITHUB_ENV echo "TESTFLAGS=$TESTFLAGS" >> $GITHUB_ENV
# we're not cross-compiling with x86, but we do need the newest
# version of gcc for the -fcallgraph-info=su flag
- name: install-x86_64
if: ${{matrix.arch == 'x86_64'}}
run: |
sudo apt-get install -qq gcc-10
echo "CC=gcc-10" >> $GITHUB_ENV
gcc-10 --version
# cross-compile with ARM Thumb (32-bit, little-endian) # cross-compile with ARM Thumb (32-bit, little-endian)
- name: install-thumb - name: install-thumb
if: ${{matrix.arch == 'thumb'}} if: ${{matrix.arch == 'thumb'}}
run: | run: |
sudo apt-get install -qq \ sudo apt-get install -qq \
gcc-arm-linux-gnueabi \ gcc-10-arm-linux-gnueabi \
libc6-dev-armel-cross \ libc6-dev-armel-cross \
qemu-user qemu-user
echo "CC=arm-linux-gnueabi-gcc -mthumb --static" >> $GITHUB_ENV echo "CC=arm-linux-gnueabi-gcc-10 -mthumb --static" >> $GITHUB_ENV
echo "EXEC=qemu-arm" >> $GITHUB_ENV echo "EXEC=qemu-arm" >> $GITHUB_ENV
arm-linux-gnueabi-gcc --version arm-linux-gnueabi-gcc-10 --version
qemu-arm -version qemu-arm -version
# cross-compile with MIPS (32-bit, big-endian) # cross-compile with MIPS (32-bit, big-endian)
- name: install-mips - name: install-mips
if: ${{matrix.arch == 'mips'}} if: ${{matrix.arch == 'mips'}}
run: | run: |
sudo apt-get install -qq \ sudo apt-get install -qq \
gcc-mips-linux-gnu \ gcc-10-mips-linux-gnu \
libc6-dev-mips-cross \ libc6-dev-mips-cross \
qemu-user qemu-user
echo "CC=mips-linux-gnu-gcc --static" >> $GITHUB_ENV echo "CC=mips-linux-gnu-gcc-10 --static" >> $GITHUB_ENV
echo "EXEC=qemu-mips" >> $GITHUB_ENV echo "EXEC=qemu-mips" >> $GITHUB_ENV
mips-linux-gnu-gcc --version mips-linux-gnu-gcc-10 --version
qemu-mips -version qemu-mips -version
# cross-compile with PowerPC (32-bit, big-endian) # cross-compile with PowerPC (32-bit, big-endian)
- name: install-powerpc - name: install-powerpc
if: ${{matrix.arch == 'powerpc'}} if: ${{matrix.arch == 'powerpc'}}
run: | run: |
sudo apt-get install -qq \ sudo apt-get install -qq \
gcc-powerpc-linux-gnu \ gcc-10-powerpc-linux-gnu \
libc6-dev-powerpc-cross \ libc6-dev-powerpc-cross \
qemu-user qemu-user
echo "CC=powerpc-linux-gnu-gcc --static" >> $GITHUB_ENV echo "CC=powerpc-linux-gnu-gcc-10 --static" >> $GITHUB_ENV
echo "EXEC=qemu-ppc" >> $GITHUB_ENV echo "EXEC=qemu-ppc" >> $GITHUB_ENV
powerpc-linux-gnu-gcc --version powerpc-linux-gnu-gcc-10 --version
qemu-ppc -version qemu-ppc -version
# make sure example can at least compile # make sure example can at least compile
@@ -148,102 +156,108 @@ jobs:
retention-days: 1 retention-days: 1
# update results # update results
- name: results-code - name: results
run: | run: |
mkdir -p results mkdir -p results
make clean make clean
make code \ make lfs.csv \
CFLAGS+=" \ CFLAGS+=" \
-DLFS_NO_ASSERT \ -DLFS_NO_ASSERT \
-DLFS_NO_DEBUG \ -DLFS_NO_DEBUG \
-DLFS_NO_WARN \ -DLFS_NO_WARN \
-DLFS_NO_ERROR" \ -DLFS_NO_ERROR"
CODEFLAGS+="-o results/code-${{matrix.arch}}.csv" cp lfs.csv results/${{matrix.arch}}.csv
- name: results-code-readonly ./scripts/summary.py results/${{matrix.arch}}.csv
- name: results-readonly
run: | run: |
mkdir -p results mkdir -p results
make clean make clean
make code \ make lfs.csv \
CFLAGS+=" \ CFLAGS+=" \
-DLFS_NO_ASSERT \ -DLFS_NO_ASSERT \
-DLFS_NO_DEBUG \ -DLFS_NO_DEBUG \
-DLFS_NO_WARN \ -DLFS_NO_WARN \
-DLFS_NO_ERROR \ -DLFS_NO_ERROR \
-DLFS_READONLY" \ -DLFS_READONLY"
CODEFLAGS+="-o results/code-${{matrix.arch}}-readonly.csv" cp lfs.csv results/${{matrix.arch}}-readonly.csv
- name: results-code-threadsafe ./scripts/summary.py results/${{matrix.arch}}-readonly.csv
- name: results-threadsafe
run: | run: |
mkdir -p results mkdir -p results
make clean make clean
make code \ make lfs.csv \
CFLAGS+=" \ CFLAGS+=" \
-DLFS_NO_ASSERT \ -DLFS_NO_ASSERT \
-DLFS_NO_DEBUG \ -DLFS_NO_DEBUG \
-DLFS_NO_WARN \ -DLFS_NO_WARN \
-DLFS_NO_ERROR \ -DLFS_NO_ERROR \
-DLFS_THREADSAFE" \ -DLFS_THREADSAFE"
CODEFLAGS+="-o results/code-${{matrix.arch}}-threadsafe.csv" cp lfs.csv results/${{matrix.arch}}-threadsafe.csv
- name: results-code-migrate ./scripts/summary.py results/${{matrix.arch}}-threadsafe.csv
- name: results-migrate
run: | run: |
mkdir -p results mkdir -p results
make clean make clean
make code \ make lfs.csv \
CFLAGS+=" \ CFLAGS+=" \
-DLFS_NO_ASSERT \ -DLFS_NO_ASSERT \
-DLFS_NO_DEBUG \ -DLFS_NO_DEBUG \
-DLFS_NO_WARN \ -DLFS_NO_WARN \
-DLFS_NO_ERROR \ -DLFS_NO_ERROR \
-DLFS_MIGRATE" \ -DLFS_MIGRATE"
CODEFLAGS+="-o results/code-${{matrix.arch}}-migrate.csv" cp lfs.csv results/${{matrix.arch}}-migrate.csv
- name: results-code-error-asserts ./scripts/summary.py results/${{matrix.arch}}-migrate.csv
- name: results-error-asserts
run: | run: |
mkdir -p results mkdir -p results
make clean make clean
make code \ make lfs.csv \
CFLAGS+=" \ CFLAGS+=" \
-DLFS_NO_DEBUG \ -DLFS_NO_DEBUG \
-DLFS_NO_WARN \ -DLFS_NO_WARN \
-DLFS_NO_ERROR \ -DLFS_NO_ERROR \
-D'LFS_ASSERT(test)=do {if(!(test)) {return -1;}} while(0)'" \ -D'LFS_ASSERT(test)=do {if(!(test)) {return -1;}} while(0)'"
CODEFLAGS+="-o results/code-${{matrix.arch}}-error-asserts.csv" cp lfs.csv results/${{matrix.arch}}-error-asserts.csv
./scripts/summary.py results/${{matrix.arch}}-error-asserts.csv
- name: upload-results - name: upload-results
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v2
with: with:
name: results name: results
path: results path: results
# limit reporting to Thumb, otherwise there would be too many numbers
# flying around for the results to be easily readable # create statuses with results
- name: collect-status - name: collect-status
if: ${{matrix.arch == 'thumb'}}
run: | run: |
mkdir -p status mkdir -p status
for f in $(shopt -s nullglob ; echo results/code*.csv) for f in $(shopt -s nullglob ; echo results/*.csv)
do do
export STEP="results-code$( export STEP="results$(
echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p')" echo $f | sed -n 's/[^-]*-\(.*\).csv/-\1/p')"
export CONTEXT="results / code$( for r in code stack structs
echo $f | sed -n 's/.*code-.*-\(.*\).csv/ (\1)/p')" do
export PREV="$(curl -sS \ export CONTEXT="results (${{matrix.arch}}$(
"$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \ echo $f | sed -n 's/[^-]*-\(.*\).csv/, \1/p')) / $r"
| jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[] export PREV="$(curl -sS \
| select(.context == env.CONTEXT).description "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master?per_page=100" \
| capture("Code size is (?<result>[0-9]+)").result' \ | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
|| echo 0)" | select(.context == env.CONTEXT).description
export DESCRIPTION="$(./scripts/code.py -u $f --summary | awk ' | capture("(?<result>[0-9∞]+)").result' \
NR==2 {printf "Code size is %d B",$2} || echo 0)"
NR==2 && ENVIRON["PREV"]+0 != 0 { export DESCRIPTION="$(./scripts/summary.py $f -f $r -Y | awk '
printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')" NR==2 {printf "%s B",$2}
jq -n '{ NR==2 && ENVIRON["PREV"]+0 != 0 {
state: "success", printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')"
context: env.CONTEXT, jq -n '{
description: env.DESCRIPTION, state: "success",
target_job: "${{github.job}} (${{matrix.arch}})", context: env.CONTEXT,
target_step: env.STEP}' \ description: env.DESCRIPTION,
| tee status/code$( target_job: "${{github.job}} (${{matrix.arch}})",
echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p').json target_step: env.STEP}' \
| tee status/$r-${{matrix.arch}}$(
echo $f | sed -n 's/[^-]*-\(.*\).csv/-\1/p').json
done
done done
- name: upload-status - name: upload-status
if: ${{matrix.arch == 'thumb'}}
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v2
with: with:
name: status name: status
@@ -252,7 +266,7 @@ jobs:
# run under Valgrind to check for memory errors # run under Valgrind to check for memory errors
valgrind: valgrind:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: install - name: install
@@ -272,7 +286,7 @@ jobs:
# self-host with littlefs-fuse for a fuzz-like test # self-host with littlefs-fuse for a fuzz-like test
fuse: fuse:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
if: ${{!endsWith(github.ref, '-prefix')}} if: ${{!endsWith(github.ref, '-prefix')}}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@@ -318,7 +332,7 @@ jobs:
# test migration using littlefs-fuse # test migration using littlefs-fuse
migrate: migrate:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
if: ${{!endsWith(github.ref, '-prefix')}} if: ${{!endsWith(github.ref, '-prefix')}}
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@@ -385,7 +399,7 @@ jobs:
# collect coverage info # collect coverage info
coverage: coverage:
runs-on: ubuntu-18.04 runs-on: ubuntu-20.04
needs: [test] needs: [test]
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@@ -421,14 +435,14 @@ jobs:
export STEP="results-coverage" export STEP="results-coverage"
export CONTEXT="results / coverage" export CONTEXT="results / coverage"
export PREV="$(curl -sS \ export PREV="$(curl -sS \
"$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \ "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master?per_page=100" \
| jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[] | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
| select(.context == env.CONTEXT).description | select(.context == env.CONTEXT).description
| capture("Coverage is (?<result>[0-9\\.]+)").result' \ | capture("(?<result>[0-9\\.]+)").result' \
|| echo 0)" || echo 0)"
export DESCRIPTION="$( export DESCRIPTION="$(
./scripts/coverage.py -u results/coverage.csv --summary | awk -F '[ /%]+' ' ./scripts/coverage.py -u results/coverage.csv -Y | awk -F '[ /%]+' '
NR==2 {printf "Coverage is %.1f%% of %d lines",$4,$3} NR==2 {printf "%.1f%% of %d lines",$4,$3}
NR==2 && ENVIRON["PREV"]+0 != 0 { NR==2 && ENVIRON["PREV"]+0 != 0 {
printf " (%+.1f%%)",$4-ENVIRON["PREV"]}')" printf " (%+.1f%%)",$4-ENVIRON["PREV"]}')"
jq -n '{ jq -n '{