Mirror of https://github.com/eledio-devices/thirdparty-littlefs.git, synced 2025-10-31 08:42:40 +01:00
			
		
		
		
	Added post-release script, cleaned up workflows
This helps with an outstanding maintainer annoyance: updating dependencies to bring in new versions on each littlefs release.

Instead of adding a bunch of scripts to the tail end of the release workflow, the post-release script just triggers a single "repository_dispatch" event in the newly created littlefs.post-release repo. From there any number of post-release workflows can be run. This indirection should let the post-release scripts move much quicker than littlefs itself, which helps offset how fragile these sorts of scripts are.

Also finished cleaning up the workflows now that they are mostly working.
		
							
								
								
									
.github/workflows/post-release.yml (new file, 26 lines, vendored)

@@ -0,0 +1,26 @@
+name: post-release
+on:
+  release:
+    branches: [master]
+    types: [released]
+
+jobs:
+  post-release:
+    runs-on: ubuntu-18.04
+    steps:
+      # trigger post-release in dependency repo, this indirection allows the
+      # dependency repo to be updated often without affecting this repo. At
+      # the time of this comment, the dependency repo is responsible for
+      # creating PRs for other dependent repos post-release.
+      - name: trigger-post-release
+        continue-on-error: true
+        run: |
+          curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+            "$GITHUB_API_URL/repos/${{secrets.POST_RELEASE_REPO}}/dispatches" \
+            -d "$(jq -n '{
+              event_type: "post-release",
+              client_payload: {
+                repo: env.GITHUB_REPOSITORY,
+                version: "${{github.event.release.tag_name}}"}}' \
+              | tee /dev/stderr)"
+
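For context, the receiving side of this indirection lives in the separate littlefs.post-release repo and is not part of this commit. A consumer workflow there might look roughly like the sketch below; the job and step names are made up, and only the "post-release" event type and the client_payload fields (repo, version) mirror what trigger-post-release above sends:

name: post-release
on:
  repository_dispatch:
    types: [post-release]

jobs:
  update-dependents:
    runs-on: ubuntu-18.04
    steps:
      # the payload carries which repo was released and the new version tag
      - name: show-payload
        run: |
          echo "repo:    ${{github.event.client_payload.repo}}"
          echo "version: ${{github.event.client_payload.version}}"
      # ...from here, open PRs against dependent repos, bump manifests, etc.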
							
								
								
									
.github/workflows/release.yml (125 changed lines, vendored)

@@ -7,14 +7,13 @@ on:
 
 jobs:
   release:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
 
     # need to manually check for a couple things
     # - tests passed?
     # - we are the most recent commit on master?
-    if: |
-      github.event.workflow_run.conclusion == 'success' &&
-      github.event.workflow_run.head_sha == github.sha
+    if: ${{github.event.workflow_run.conclusion == 'success' &&
+      github.event.workflow_run.head_sha == github.sha}}
 
     steps:
       - uses: actions/checkout@v2
@@ -71,33 +70,78 @@ jobs:
       # try to find results from tests
       - name: collect-results
         run: |
-          [ -e results/code-thumb.csv ] && \
-            ./scripts/code.py -u results/code-thumb.csv -s \
-            | awk 'NR==2 {printf "Code size,%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-readonly.csv ] && \
-            ./scripts/code.py -u results/code-thumb-readonly.csv -s \
-            | awk 'NR==2 {printf "Code size (readonly),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-threadsafe.csv ] && \
-            ./scripts/code.py -u results/code-thumb-threadsafe.csv -s \
-            | awk 'NR==2 {printf "Code size (threadsafe),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/code-thumb-migrate.csv ] && \
-            ./scripts/code.py -u results/code-thumb-migrate.csv -s \
-            | awk 'NR==2 {printf "Code size (migrate),%d B\n",$2}' \
-            >> results.csv
-          [ -e results/coverage.csv ] && \
-            ./scripts/coverage.py -u results/coverage.csv -s \
-            | awk 'NR==2 {printf "Coverage,%.1f%% of %d lines\n",$4,$3}' \
-            >> results.csv
+          # previous results to compare against?
+          [ -n "$LFS_PREV_VERSION" ] && curl -sS \
+            "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/`
+              `status/$LFS_PREV_VERSION" \
+            | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]' \
+            >> prev-results.json \
+            || true
+
+          # unfortunately these each have their own format
+          [ -e results/code-thumb.csv ] && ( \
+            export PREV="$(jq -re '
+                  select(.context == "results / code").description
+                  | capture("Code size is (?<result>[0-9]+)").result' \
+                prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb.csv -s | awk '
+              NR==2 {printf "Code size,%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+            >> results.csv)
+          [ -e results/code-thumb-readonly.csv ] && ( \
+            export PREV="$(jq -re '
+                  select(.context == "results / code (readonly)").description
+                  | capture("Code size is (?<result>[0-9]+)").result' \
+                prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-readonly.csv -s | awk '
+              NR==2 {printf "Code size (readonly),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+            >> results.csv)
+          [ -e results/code-thumb-threadsafe.csv ] && ( \
+            export PREV="$(jq -re '
+                  select(.context == "results / code (threadsafe)").description
+                  | capture("Code size is (?<result>[0-9]+)").result' \
+                prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-threadsafe.csv -s | awk '
+              NR==2 {printf "Code size (threadsafe),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+            >> results.csv)
+          [ -e results/code-thumb-migrate.csv ] && ( \
+            export PREV="$(jq -re '
+                  select(.context == "results / code (migrate)").description
+                  | capture("Code size is (?<result>[0-9]+)").result' \
+                prev-results.json || echo 0)"
+            ./scripts/code.py -u results/code-thumb-migrate.csv -s | awk '
+              NR==2 {printf "Code size (migrate),%d B",$2}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+            >> results.csv)
+          [ -e results/coverage.csv ] && ( \
+            export PREV="$(jq -re '
+                  select(.context == "results / coverage").description
+                  | capture("Coverage is (?<result>[0-9\\.]+)").result' \
+                prev-results.json || echo 0)"
+            ./scripts/coverage.py -u results/coverage.csv -s | awk -F '[ /%]+' '
+              NR==2 {printf "Coverage,%.1f%% of %d lines",$4,$3}
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
+              NR==2 {printf "\n"}' \
+            >> results.csv)
 
+          # transpose to GitHub table
           [ -e results.csv ] || exit 0
           awk -F ',' '
             {label[NR]=$1; value[NR]=$2}
             END {
               for (r=1; r<=NR; r++) {printf "| %s ",label[r]}; printf "|\n";
-              for (r=1; r<=NR; r++) {printf "|--:"}; printf "|\n";
+              for (r=1; r<=NR; r++) {printf "|:--"}; printf "|\n";
               for (r=1; r<=NR; r++) {printf "| %s ",value[r]}; printf "|\n"}' \
             results.csv > results.txt
           echo "RESULTS:"
@@ -106,20 +150,25 @@ jobs:
       # find changes from history
       - name: collect-changes
         run: |
-          [ ! -z "$LFS_PREV_VERSION" ] || exit 0
-          git log --oneline "$LFS_PREV_VERSION.." \
-            --grep='^Merge' --invert-grep > changes.txt
+          [ -n "$LFS_PREV_VERSION" ] || exit 0
+          # use explicit link to github commit so that release notes can
+          # be copied elsewhere
+          git log "$LFS_PREV_VERSION.." \
+            --grep='^Merge' --invert-grep \
+            --format="format:[\`%h\`](`
+              `https://github.com/$GITHUB_REPOSITORY/commit/%h) %s" \
+            > changes.txt
          echo "CHANGES:"
           cat changes.txt
 
       # create and update major branches (vN and vN-prefix)
-      - name: build-major-branches
+      - name: create-major-branches
         run: |
           # create major branch
           git branch "v$LFS_VERSION_MAJOR" HEAD
 
           # create major prefix branch
-          git config user.name ${{secrets.BOT_USERNAME}}
+          git config user.name ${{secrets.BOT_USER}}
           git config user.email ${{secrets.BOT_EMAIL}}
           git fetch "https://github.com/$GITHUB_REPOSITORY.git" \
             "v$LFS_VERSION_MAJOR-prefix" || true
@@ -137,27 +186,19 @@ jobs:
             "v$LFS_VERSION_MAJOR-prefix"
 
       # build release notes
-      - name: build-release
+      - name: create-release
         run: |
-          # find changes since last release
-          #if [ ! -z "$LFS_PREV_VERSION" ]
-          #then
-          #  export CHANGES="$(git log --oneline "$LFS_PREV_VERSION.." \
-          #    --grep='^Merge' --invert-grep)"
-          #  printf "CHANGES\n%s\n\n" "$CHANGES"
-          #fi
-
           # create release and patch version tag (vN.N.N)
           # only draft if not a patch release
           [ -e results.txt ] && export RESULTS="$(cat results.txt)"
           [ -e changes.txt ] && export CHANGES="$(cat changes.txt)"
-          curl -sS -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+          curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
             "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/releases" \
-            -d "$(jq -sR '{
+            -d "$(jq -n '{
               tag_name: env.LFS_VERSION,
               name: env.LFS_VERSION | rtrimstr(".0"),
               target_commitish: "${{github.event.workflow_run.head_sha}}",
               draft: env.LFS_VERSION | endswith(".0"),
               body: [env.RESULTS, env.CHANGES | select(.)] | join("\n\n")}' \
-              | tee /dev/stderr)" > /dev/null
+              | tee /dev/stderr)"
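Both release.yml above and status.yml below react to another workflow finishing rather than to pushes directly: the github.event.workflow_run fields referenced in these diffs (conclusion, head_sha, name, id) only exist under a workflow_run trigger. The on: block itself sits above the first hunk and is not visible in this diff, but it is roughly of the following shape; the triggering workflow name and branch filter here are assumptions, not taken from this commit:

on:
  workflow_run:
    # run once the test workflow has finished
    workflows: [test]
    branches: [master]
    types: [completed]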
							
								
								
									
.github/workflows/status.yml (59 changed lines, vendored)

@@ -6,30 +6,21 @@ on:
 
 jobs:
   status:
-    runs-on: ubuntu-latest
-    continue-on-error: true
+    runs-on: ubuntu-18.04
 
     steps:
-      - run: echo "${{toJSON(github.event.workflow_run)}}"
-
       # custom statuses?
       - uses: dawidd6/action-download-artifact@v2
+        continue-on-error: true
         with:
           workflow: ${{github.event.workflow_run.name}}
           run_id: ${{github.event.workflow_run.id}}
           name: status
           path: status
       - name: update-status
+        continue-on-error: true
         run: |
-          # TODO remove this
           ls status
-          for f in status/*.json
-          do
-            cat $f
-          done
-
-          shopt -s nullglob
-          for s in status/*.json
+          for s in $(shopt -s nullglob ; echo status/*.json)
           do
             # parse requested status
             export STATE="$(jq -er '.state' $s)"
@@ -51,46 +42,14 @@ jobs:
                       + ((.steps[]
                         | select(.name == env.TARGET_STEP)
                         | "#step:\(.number):0") // "")'))"
-            # TODO remove this
-            # print for debugging
-            echo "$(jq -nc '{
-                state: env.STATE,
-                context: env.CONTEXT,
-                description: env.DESCRIPTION,
-                target_url: env.TARGET_URL}')"
             # update status
-            curl -sS -H "authorization: token ${{secrets.BOT_TOKEN}}" \
-              -X POST "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
+            curl -sS -X POST -H "authorization: token ${{secrets.BOT_TOKEN}}" \
+              "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/statuses/`
                 `${{github.event.workflow_run.head_sha}}" \
-              -d "$(jq -nc '{
+              -d "$(jq -n '{
                 state: env.STATE,
                 context: env.CONTEXT,
                 description: env.DESCRIPTION,
-                target_url: env.TARGET_URL}')"
-
-            #if jq -er '.target_url' $s
-            #then
-            #  export TARGET_URL="$(jq -er '.target_url' $s)"
-            #elif jq -er '.target_job' $s
-            #then
-            #
-            #fi
-
+                target_url: env.TARGET_URL}' \
+                | tee /dev/stderr)"
           done
-
-
-
-#      - id: status
-#        run: |
-#          echo "::set-output name=description::$(cat statuses/x86_64.txt | tr '\n' ' ')"
-#      - uses: octokit/request-action@v2.x
-#        with:
-#          route: POST /repos/{repo}/status/{sha}
-#          repo: ${{github.repository}}
-#          sha: ${{github.event.status.sha}}
-#          context: ${{github.event.status.context}}
-#          state: ${{github.event.status.state}}
-#          description: ${{steps.status.outputs.description}}
-#          target_url: ${{github.event.status.target_url}}
-#
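The update-status loop above defines a small contract between workflows: a job in test.yml can request a custom commit status by dropping a JSON file into a status/ directory and uploading it as the "status" artifact, and status.yml then replays each file against the GitHub statuses API. A producer job might look like the sketch below; the job/step names and example values are made up, state/context/description match what update-status reads, and target_step is inferred from the TARGET_STEP variable it uses to build a link into the logs:

jobs:
  example:
    runs-on: ubuntu-18.04
    steps:
      - name: collect-status
        run: |
          mkdir -p status
          jq -n '{
            state: "success",
            context: "results / example",
            description: "Example size is 1234 B",
            target_step: "collect-status"}' \
            | tee status/example.json
      - name: upload-status
        uses: actions/upload-artifact@v2
        with:
          name: status
          path: status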
							
								
								
									
.github/workflows/test.yml (174 changed lines, vendored)

@@ -8,7 +8,7 @@ env:
 jobs:
   # run tests
   test:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     strategy:
       fail-fast: false
       matrix:
@@ -38,7 +38,7 @@ jobs:
 
       # cross-compile with ARM Thumb (32-bit, little-endian)
       - name: install-thumb
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         run: |
           sudo apt-get install -qq \
             gcc-arm-linux-gnueabi \
@@ -50,7 +50,7 @@ jobs:
           qemu-arm -version
       # cross-compile with MIPS (32-bit, big-endian)
       - name: install-mips
-        if: matrix.arch == 'mips'
+        if: ${{matrix.arch == 'mips'}}
         run: |
           sudo apt-get install -qq \
             gcc-mips-linux-gnu \
@@ -62,7 +62,7 @@ jobs:
           qemu-mips -version
       # cross-compile with PowerPC (32-bit, big-endian)
       - name: install-powerpc
-        if: matrix.arch == 'powerpc'
+        if: ${{matrix.arch == 'powerpc'}}
         run: |
           sudo apt-get install -qq \
             gcc-powerpc-linux-gnu \
@@ -76,71 +76,71 @@ jobs:
       # make sure example can at least compile
       - name: test-example
         run: |
-          sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c && \
+          sed -n '/``` c/,/```/{/```/d; p}' README.md > test.c
           make all CFLAGS+=" \
             -Duser_provided_block_device_read=NULL \
             -Duser_provided_block_device_prog=NULL \
             -Duser_provided_block_device_erase=NULL \
             -Duser_provided_block_device_sync=NULL \
             -include stdio.h"
+          rm test.c
 
-#      # test configurations
-#      # normal+reentrant tests
-#      - name: test-default
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk"
-#      # NOR flash: read/prog = 1 block = 4KiB
-#      - name: test-nor
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
-#      # SD/eMMC: read/prog = 512 block = 512
-#      - name: test-emmc
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
-#      # NAND flash: read/prog = 4KiB block = 32KiB
-#      - name: test-nand
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
-#      # other extreme geometries that are useful for various corner cases
-#      - name: test-no-intrinsics
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_NO_INTRINSICS"
-#      - name: test-byte-writes
-#        # it just takes too long to test byte-level writes when in qemu,
-#        # should be plenty covered by the other configurations
-#        if: matrix.arch == 'x86_64'
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
-#      - name: test-block-cycles
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_BLOCK_CYCLES=1"
-#      - name: test-odd-block-count
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
-#      - name: test-odd-block-size
-#        run: |
-#          make clean
-#          make test TESTFLAGS+="-nrk \
-#            -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"
+      # test configurations
+      # normal+reentrant tests
+      - name: test-default
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk"
+      # NOR flash: read/prog = 1 block = 4KiB
+      - name: test-nor
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=1 -DLFS_BLOCK_SIZE=4096"
+      # SD/eMMC: read/prog = 512 block = 512
+      - name: test-emmc
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=512 -DLFS_BLOCK_SIZE=512"
+      # NAND flash: read/prog = 4KiB block = 32KiB
+      - name: test-nand
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=4096 -DLFS_BLOCK_SIZE=\(32*1024\)"
+      # other extreme geometries that are useful for various corner cases
+      - name: test-no-intrinsics
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_NO_INTRINSICS"
+      - name: test-byte-writes
+        # it just takes too long to test byte-level writes when in qemu,
+        # should be plenty covered by the other configurations
+        if: ${{matrix.arch == 'x86_64'}}
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=1 -DLFS_CACHE_SIZE=1"
+      - name: test-block-cycles
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_BLOCK_CYCLES=1"
+      - name: test-odd-block-count
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_BLOCK_COUNT=1023 -DLFS_LOOKAHEAD_SIZE=256"
+      - name: test-odd-block-size
+        run: |
+          make clean
+          make test TESTFLAGS+="-nrk \
+            -DLFS_READ_SIZE=11 -DLFS_BLOCK_SIZE=704"
 
       # upload coverage for later coverage
       - name: upload-coverage
-        continue-on-error: true
         uses: actions/upload-artifact@v2
         with:
           name: coverage
@@ -149,7 +149,6 @@ jobs:
 
       # update results
       - name: results-code
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -161,7 +160,6 @@ jobs:
               -DLFS_NO_ERROR" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}.csv"
       - name: results-code-readonly
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -174,7 +172,6 @@ jobs:
               -DLFS_READONLY" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-readonly.csv"
       - name: results-code-threadsafe
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -187,7 +184,6 @@ jobs:
               -DLFS_THREADSAFE" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-threadsafe.csv"
       - name: results-code-migrate
-        continue-on-error: true
         run: |
           mkdir -p results
           make clean
@@ -200,7 +196,6 @@ jobs:
               -DLFS_MIGRATE" \
             CODEFLAGS+="-o results/code-${{matrix.arch}}-migrate.csv"
       - name: upload-results
-        continue-on-error: true
         uses: actions/upload-artifact@v2
         with:
           name: results
@@ -208,28 +203,25 @@ jobs:
       # limit reporting to Thumb, otherwise there would be too many numbers
       # flying around for the results to be easily readable
       - name: collect-status
-        continue-on-error: true
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         run: |
           mkdir -p status
-          for f in results/code*.csv
+          for f in $(shopt -s nullglob ; echo results/code*.csv)
           do
-            [ -e "$f" ] || continue
             export STEP="results-code$(
               echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p')"
             export CONTEXT="results / code$(
               echo $f | sed -n 's/.*code-.*-\(.*\).csv/ (\1)/p')"
             export PREV="$(curl -sS \
               "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
-              | jq -re "select(.sha != env.GITHUB_SHA) | .statuses[]
+              | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
                 | select(.context == env.CONTEXT).description
-                | capture(\"Code size is (?<result>[0-9]+)\").result" \
+                | capture("Code size is (?<result>[0-9]+)").result' \
               || echo 0)"
-            echo $PREV
             export DESCRIPTION="$(./scripts/code.py -u $f -s | awk '
               NR==2 {printf "Code size is %d B",$2}
-              NR==2 && ENVIRON["PREV"] != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/$2}')"
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
+                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')"
             jq -n '{
               state: "success",
               context: env.CONTEXT,
@@ -240,8 +232,7 @@ jobs:
                 echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p').json
           done
       - name: upload-status
-        continue-on-error: true
-        if: matrix.arch == 'thumb'
+        if: ${{matrix.arch == 'thumb'}}
         uses: actions/upload-artifact@v2
         with:
           name: status
@@ -250,7 +241,7 @@ jobs:
 
   # run under Valgrind to check for memory errors
   valgrind:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -264,14 +255,14 @@ jobs:
           sudo apt-get update -qq
           sudo apt-get install -qq valgrind
           valgrind --version
-#      # normal tests, we don't need to test all geometries
-#      - name: test-valgrind
-#        run: make test TESTFLAGS+="-k --valgrind"
+      # normal tests, we don't need to test all geometries
+      - name: test-valgrind
+        run: make test TESTFLAGS+="-k --valgrind"
 
   # self-host with littlefs-fuse for a fuzz-like test
   fuse:
-    runs-on: ubuntu-latest
-    if: "!endsWith(github.ref, '-prefix')"
+    runs-on: ubuntu-18.04
+    if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -316,8 +307,8 @@ jobs:
 
   # test migration using littlefs-fuse
   migrate:
-    runs-on: ubuntu-latest
-    if: "!endsWith(github.ref, '-prefix')"
+    runs-on: ubuntu-18.04
+    if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -383,9 +374,8 @@ jobs:
 
   # collect coverage info
   coverage:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     needs: [test]
-    continue-on-error: true
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -393,7 +383,7 @@ jobs:
           sudo apt-get update -qq
           sudo apt-get install -qq python3 python3-pip lcov
           sudo pip3 install toml
-      # yes we continue-on-error on every step, continue-on-error
+      # yes we continue-on-error nearly every step, continue-on-error
       # at job level apparently still marks a job as failed, which isn't
       # what we want
       - uses: actions/download-artifact@v2
@@ -410,12 +400,10 @@ jobs:
           ./scripts/coverage.py results/coverage.info -o results/coverage.csv
       - name: upload-results
         uses: actions/upload-artifact@v2
-        continue-on-error: true
         with:
           name: results
           path: results
       - name: collect-status
-        continue-on-error: true
         run: |
           mkdir -p status
           [ -e results/coverage.csv ] || exit 0
@@ -423,15 +411,14 @@ jobs:
           export CONTEXT="results / coverage"
           export PREV="$(curl -sS \
             "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
-            | jq -re "select(.sha != env.GITHUB_SHA) | .statuses[]
+            | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
              | select(.context == env.CONTEXT).description
-              | capture(\"Coverage is (?<result>[0-9\\\\.]+)\").result" \
+              | capture("Coverage is (?<result>[0-9\\.]+)").result' \
             || echo 0)"
           export DESCRIPTION="$(
-            ./scripts/coverage.py -u results/coverage.csv -s \
-            | awk -F '[ /%]+' '
+            ./scripts/coverage.py -u results/coverage.csv -s | awk -F '[ /%]+' '
               NR==2 {printf "Coverage is %.1f%% of %d lines",$4,$3}
-              NR==2 && ENVIRON["PREV"] != 0 {
+              NR==2 && ENVIRON["PREV"]+0 != 0 {
                 printf " (%+.1f%%)",$4-ENVIRON["PREV"]}')"
           jq -n '{
             state: "success",
@@ -442,7 +429,6 @@ jobs:
             | tee status/coverage.json
       - name: upload-status
         uses: actions/upload-artifact@v2
-        continue-on-error: true
         with:
           name: status
           path: status
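The comment retained in the coverage job explains why failure tolerance is applied per step rather than per job: continue-on-error at the job level apparently still marks the job as failed, which isn't what's wanted here. A minimal illustration of that distinction, using a hypothetical job not taken from this repo:

jobs:
  best-effort:
    runs-on: ubuntu-18.04
    # continue-on-error: true  # job-level: per the comment above, the job
    #                          # would still be shown as failed
    steps:
      - name: optional-step
        continue-on-error: true  # step-level: a failure here doesn't fail the job
        run: exit 1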