Workflow file for this run

# Run final tests only when attempting to merge, shown as skipped status checks beforehand
name: Merge group tests
on:
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
    branches: [main]
  merge_group:
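# `github.head_ref` is only set on `pull_request` events; `merge_group` runs fall back to the
# unique `run_id`, so queued merge runs are never cancelled by this concurrency group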
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
jobs:
  # NOTE: Replica of the lurk-rs merge-tests GPU benchmark; no GPU is used here and only a regular fibonacci function is benchmarked
  gpu-benchmark:
    if: github.event_name != 'pull_request' || github.event.action == 'enqueued'
    name: Run fibonacci bench
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Install dependencies
      - uses: actions-rs/toolchain@v1
      - uses: Swatinem/rust-cache@v2
      - uses: taiki-e/install-action@v2
        with:
          tool: [email protected]
      - name: Install criterion
        run: |
          cargo install cargo-criterion
          cargo install criterion-table
      - name: Set env vars
        run: |
          # Set bench output format type
          echo "LURK_BENCH_OUTPUT=commit-comment" | tee -a $GITHUB_ENV
          echo "BASE_SHA=${{ github.event.merge_group.base_sha }}" | tee -a $GITHUB_ENV
          # Get CPU model
          echo "CPU_MODEL=$(grep '^model name' /proc/cpuinfo | head -1 | awk -F ': ' '{ print $2 }')" | tee -a $GITHUB_ENV
      # Checkout gh-pages to check for cached bench result
      - name: Checkout gh-pages
        uses: actions/checkout@v4
        with:
          ref: gh-pages
          path: gh-pages
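      # `gh-pages` is expected to hold one `<commit sha>.json` bench result per previously
      # benchmarked commit (see the auto-commit step at the end of this job)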
      - name: Check for cached bench result
        id: cached-bench
        run: |
          if [ -f "${{ env.BASE_SHA }}.json" ]
          then
            echo "cached=true" | tee -a $GITHUB_OUTPUT
            cp ${{ env.BASE_SHA }}.json ..
          else
            echo "cached=false" | tee -a $GITHUB_OUTPUT
          fi
        working-directory: ${{ github.workspace }}/gh-pages
      # Checkout base branch for comparative bench
      - name: Checkout main
        uses: actions/checkout@v4
        if: steps.cached-bench.outputs.cached == 'false'
        with:
          ref: main
          path: main
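      # Both bench steps below assume the justfile's `bench` recipe writes its results to a
      # `<git sha>.json` file named after the commit being benchmarked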
      - name: Run GPU bench on base branch
        if: steps.cached-bench.outputs.cached == 'false'
        run: |
          # Copy justfile & env to main, overwriting existing config with that of PR branch
          cp ../benches/justfile ../benches/bench.env .
          # Run benchmark
          just --dotenv-filename bench.env bench fibonacci
          # Copy bench output to PR branch
          cp ${{ env.BASE_SHA }}.json ..
        working-directory: ${{ github.workspace }}/main
      - name: Run GPU bench on PR branch
        run: |
          just --justfile benches/justfile --dotenv-filename benches/bench.env bench fibonacci
          cp benches/${{ github.sha }}.json .
        working-directory: ${{ github.workspace }}
      - name: Copy the benchmark template and prepare it with data
        run: |
          cp .github/tables.toml .
          # Get total RAM in GB
          TOTAL_RAM=$(grep MemTotal /proc/meminfo | awk '{$2=$2/(1024^2); print $2, "GB RAM";}')
          # Use conditionals to ensure that only non-empty variables are inserted
          [[ ! -z "${{ env.CPU_MODEL }}" ]] && sed -i "/^\"\"\"$/i ${{ env.CPU_MODEL }}" tables.toml
          [[ ! -z "$TOTAL_RAM" ]] && sed -i "/^\"\"\"$/i $TOTAL_RAM" tables.toml
        working-directory: ${{ github.workspace }}
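      # The `sed` calls above insert the machine specs just before the closing `"""` of a
      # multi-line string in `tables.toml`; an illustrative (assumed) result, using a
      # criterion-table-style `[top_comments]` section:
      #   Overview = """
      #   AMD EPYC 7763 64-Core Processor
      #   15.6 GB RAM
      #   """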
      # TODO: Fail on regression (e.g. `criterion-table` comparative output of `1.10+ slower`)
      # Create a `criterion-table` and write in commit comment
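      # `criterion-table` reads the concatenated cargo-criterion JSON results of the base and PR
      # runs and renders them as a side-by-side markdown comparison table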
      - name: Run `criterion-table`
        run: cat ${{ env.BASE_SHA }}.json ${{ github.sha }}.json | criterion-table > BENCHMARKS.md
      - name: Write bench on commit comment
        uses: peter-evans/commit-comment@v3
        with:
          body-path: BENCHMARKS.md
      # Check for a slowdown >= 10%. If so, open an issue
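      # The awk/bc parse below assumes BENCHMARKS.md rows roughly like the following, where
      # field 11 (split on `*` and `x`) is the slowdown multiplier (illustrative format):
      #   | **fibonacci-num-100** | `1.23 ms` (✅ **1.00x**) | `1.38 ms` (❌ *1.12x slower*) |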
      - name: Check for perf regression
        id: regression-check
        run: |
          regressions=$(awk -F'[*x]' '/slower/{print $11}' BENCHMARKS.md)
          echo $regressions
          for r in $regressions
          do
            if (( $(echo "$r >= 0.10" | bc -l) ))
            then
              exit 1
            fi
          done
        continue-on-error: true
      # Not possible to use ${{ github.event.number }} with the `merge_group` trigger
      - name: Get PR number from merge branch
        run: |
          echo "PR_NUMBER=$(echo ${{ github.event.merge_group.head_ref }} | sed -e 's/.*pr-\(.*\)-.*/\1/')" | tee -a $GITHUB_ENV
      - name: Create file for issue
        if: steps.regression-check.outcome == 'failure'
        run: |
          printf '%s\n' "Regression >= 10% found during merge for PR #${{ env.PR_NUMBER }}
          Commit: ${{ github.sha }}
          Workflow run: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" > ./_body.md
      - name: Open issue on regression
        if: steps.regression-check.outcome == 'failure'
        uses: peter-evans/create-issue-from-file@v4
        with:
          title: ':rotating_light: Performance regression detected for PR #${{ env.PR_NUMBER }}'
          content-filepath: ./_body.md
          labels: |
            P-Performance
            automated issue
      # TODO: Should this go to the `benchmarks/history` path and get zipped like the other benches?
      # If not, where should it go?
      # In Lurk, rename the file to include GPU name to ensure comparable specs:
      # mv ../${{ github.sha }}.json ./${{ github.sha }}-${{ env.GPU_MODEL }}.json
      - name: Remove old main bench
        run: |
          rm ${{ env.BASE_SHA }}.json
        working-directory: ${{ github.workspace }}
      - name: Cache bench result on `gh-pages` branch if no regression
        if: steps.regression-check.outcome != 'failure'
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          branch: gh-pages
          commit_message: '[automated] Save bench result from PR #${{ env.PR_NUMBER }}'
          file_pattern: '${{ github.sha }}.json'