# .github/workflows/benchmarks.yml
name: Benchmarks

on:
  # Mon-Fri at 04:39 UTC, see https://crontab.guru/
  # Currently disabled since benchmarks can be run on demand
  # schedule:
  #   - cron: "39 4 * * 1-5"
  workflow_dispatch:
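  # With the cron disabled, benchmark runs are started manually from the
  # Actions tab or with the GitHub CLI, e.g. `gh workflow run benchmarks.yml`.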
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
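  # i.e. at most one benchmark run per ref at a time; a newly triggered run
  # cancels any run still in progress for the same ref.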
defaults:
  # Required shell entrypoint to have properly activated conda environments
  run:
    shell: bash -l {0}

jobs:
  tests:
    name: ${{ matrix.name_prefix }} ${{ matrix.os }} py${{ matrix.python_version }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-latest"]
        python_version: ["3.11"]
        extra-env: [""]
        cubed-config: [tests/configs/local_single-threaded.yaml]
        lithops-config: [""]
        name_prefix: [single-threaded]
        include:
          - os: "ubuntu-latest"
            python_version: "3.11"
            extra-env: ci/environment-lithops-aws.yml
            cubed-config: tests/configs/lithops_aws.yaml
            lithops-config: .github/workflows/.lithops_config_aws
            name_prefix: lithops-aws
          - os: "ubuntu-latest"
            python_version: "3.11"
            extra-env: ci/environment-lithops-gcp.yml
            cubed-config: tests/configs/lithops_gcp.yaml
            lithops-config: .github/workflows/.lithops_config_gcp
            name_prefix: lithops-gcp
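    # Together the matrix above gives three executor configurations: a local
    # single-threaded run plus Lithops on AWS and on GCP, each selected via the
    # corresponding Cubed config file under tests/configs/.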
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Copy lithops configuration templates
        run: |
          cp $GITHUB_WORKSPACE/.github/workflows/.lithops_config_aws.template $GITHUB_WORKSPACE/.github/workflows/.lithops_config_aws
          cp $GITHUB_WORKSPACE/.github/workflows/.lithops_config_gcp.template $GITHUB_WORKSPACE/.github/workflows/.lithops_config_gcp

      - name: Google auth
        id: 'auth'
        uses: 'google-github-actions/auth@v2'
        with:
          credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'
          create_credentials_file: true
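      # The auth step above writes the GOOGLE_CREDENTIALS secret to a file on
      # the runner and exposes its path as `steps.auth.outputs.credentials_file_path`;
      # the variable-substitution steps below rewrite the keys listed under `env`
      # inside the copied Lithops config templates, injecting the AWS secrets and
      # that credentials file path.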
      - name: Configure lithops AWS
        uses: microsoft/variable-substitution@v1
        with:
          files: ${{ github.workspace }}/.github/workflows/.lithops_config_aws
        env:
          aws.access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws.secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

      - name: Configure lithops GCP
        uses: microsoft/variable-substitution@v1
        with:
          files: ${{ github.workspace }}/.github/workflows/.lithops_config_gcp
        env:
          gcp.credentials_path: ${{ steps.auth.outputs.credentials_file_path }}

      - name: Set up environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          condarc-file: ci/condarc
          python-version: ${{ matrix.python_version }}
          environment-file: ci/environment.yml

      - name: Add extra packages to environment
        if: ${{ matrix.extra-env != '' }}
        run: mamba env update --file ${{ matrix.extra-env }}

      - name: Add test dependencies
        run: mamba env update --file ci/environment-test.yml

      - name: Dump environment
        run: |
          # For debugging; also saved to a file so it is uploaded with the benchmark artifacts
          echo -e "--\n--Conda Environment (re-create this with \`conda env create --name <name> -f <output_file>\`)\n--"
          mamba env export | grep -E -v '^prefix:.*$' | tee mamba_env_export.yml

      - name: Setup Graphviz
        uses: ts-graphviz/setup-graphviz@v2
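      # CUBED_CONFIG selects the matrix's executor config and LITHOPS_CONFIG_FILE
      # the populated Lithops config (empty for the local run); DB_NAME and
      # NAME_PREFIX are assumed to be read by the benchmark fixtures to name the
      # per-job results database and tag its records.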
      - name: Run benchmarks
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          CUBED_CONFIG: ${{ matrix.cubed-config }}
          DB_NAME: ${{ matrix.name_prefix }}-${{ matrix.os }}-py${{ matrix.python_version }}.db
          LITHOPS_CONFIG_FILE: ${{ github.workspace }}/${{ matrix.lithops-config }}
          NAME_PREFIX: ${{ matrix.name_prefix }}
        run: |
          pytest --benchmark --basetemp=pytest-temp
      - name: Upload benchmark results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: ${{ matrix.name_prefix }}-${{ matrix.os }}-py${{ matrix.python_version }}
          path: |
            ${{ matrix.name_prefix }}-${{ matrix.os }}-py${{ matrix.python_version }}.db
            history/
            pytest-temp/
            mamba_env_export.yml
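  # Each matrix job uploads its own results database; the process-results job
  # below gathers those artifacts and folds them into the shared benchmark db.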
  process-results:
    needs: tests
    name: Combine separate benchmark results
    if: always() && github.repository == 'cubed-dev/cubed-benchmarks'
    runs-on: ubuntu-latest
    concurrency:
      # Fairly strict concurrency rule to avoid stepping on the benchmark db.
      group: process-benchmarks
      cancel-in-progress: false
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"

      - name: Install dependencies
        run: pip install alembic
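      # download-artifact@v4 with no `name` fetches every artifact from the
      # matrix jobs, each into its own subdirectory under benchmarks/.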
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: benchmarks
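      # Fetch the canonical benchmark database from S3; `|| true` tolerates a
      # missing object, e.g. on the very first benchmark run.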
      - name: Download benchmark db
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: us-east-2 # the AWS CLI needs a default region configured
          DB_NAME: benchmark.db
        run: |
          aws s3 cp s3://cubed-runtime-ci/benchmarks/$DB_NAME . || true
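      # combine-dbs.sh is expected to merge the downloaded per-job databases
      # into benchmark.db, presumably applying any alembic schema migrations
      # (alembic is installed above) before the result is uploaded below.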
      - name: Combine benchmarks
        run: |
          ls -lhR benchmarks
          bash ci/scripts/combine-dbs.sh
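      # The combined database is only pushed back to S3 for runs on main in the
      # upstream repository; forks and branches keep their results as artifacts.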
      - name: Upload benchmark db
        if: always() && github.ref == 'refs/heads/main' && github.repository == 'cubed-dev/cubed-benchmarks'
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: us-east-2 # the AWS CLI needs a default region configured
          DB_NAME: benchmark.db
        run: |
          aws s3 cp $DB_NAME s3://cubed-runtime-ci/benchmarks/

      - name: Upload benchmark results as artifact
        uses: actions/upload-artifact@v4
        with:
          name: benchmark
          path: benchmark.db