  push:
    branches:
      - main
+  schedule:
+    - cron: "0 */1 * * *"

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@@ -13,22 +15,52 @@ permissions:
  contents: read  # to fetch code (actions/checkout)

env:
-  # GITHUB_TOKEN: ${{ secrets.OB_BENCH_TOKEN }}
-  # BENCHMARKS_REPO: ev-br/ob-bench-asv
  ASV_CONFIG: asv.conf.json
+  NAME_PREFIX: gha

jobs:
+
+  trigger:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out the repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # To fetch all commits to be able to generate benchmarks html
+          token: ${{ secrets.AWS_BENCHMARKS }}
+
+      - name: Prepare the tracker branch
+        run: |
+          git config --global user.email "project@openblas"
+          git config --global user.name "OB benchmark bot"
+
+          # XXX this assumes the `tracker` branch is the squash-merge from main:
+          # revert the past merge and merge the current main instead
+          # (cannot just merge main without a revert because of force-pushes to main)
+          git checkout tracker
+          git revert HEAD --no-edit
+
+          # XXX needed locally, not on CI?
+          # git commit -am'revert squash-merging main'
+
+          git merge main --squash
+          git commit -am"squash-merge main at `git rev-parse main`"
+          git push origin HEAD
+          git checkout main
+
+
  bench:
    strategy:
      fail-fast: false
      matrix:
        include:
          # define matrix.name to identify the github actions machine, as the hostname changes every time
          - image: "cirun-aws-runner-graviton--${{ github.run_id }}"
-            name: "gh-graviton"
+            name: "gha-aws-graviton"
          - image: "cirun-aws-runner-cascade-lake--${{ github.run_id }}"
-            name: "gh-skylake"
+            name: "gha-aws-skylake"
    runs-on: ${{ matrix.image }}
+    needs: trigger

    steps:
      - uses: actions/checkout@v4
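The `trigger` job added above keeps the `tracker` branch equal to a single squash-merge of `main`: it reverts the previous squash commit and then squash-merges the current `main`, since plain merges stop working once `main` has been force-pushed. A quick local check of the resulting branch shape, with illustrative hashes (not taken from the repository):

    git fetch origin tracker
    git log --oneline -2 origin/tracker
    # abc1234 squash-merge main at <current main sha>          (illustrative output)
    # def5678 Revert "squash-merge main at <previous main sha>"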
@@ -58,16 +90,22 @@ jobs:
        run: |
          # --break-system-packages is required on ubuntu noble
          pip3 install "numpy<2" meson meson-python ninja build asv virtualenv --break-system-packages
+
          # install the nightly OpenBLAS wheel
-          pip install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 --break-system-packages
+          pip3 install -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple scipy-openblas32 --break-system-packages
+
+          # dump the pkg-config for the ASV build to pick up (cf $PKG_CONFIG_PATH usage under `Run benchmarks`)
          python3 -c'import scipy_openblas32 as so; print(so.get_pkg_config())' > scipy_openblas.pc
-          # export PKG_CONFIG_PATH=$PWD

      - name: Print OpenBLAS information
        run: |
          echo "scipy_openblas.pc contents: "
          cat scipy_openblas.pc

+          # store the OpenBLAS wheel info to add to the gh-pages commit message
+          echo ${{ matrix.name }}":" > wheel_info
+          python3 -c'import scipy_openblas32 as sc; print(f"version={sc.__version__} - {sc.get_openblas_config()}")' >> wheel_info
+
      - name: Set and log asv machine configuration
        run: |
          python3 -m asv machine --yes --config asv.conf.json
@@ -82,20 +120,25 @@ jobs:
      - name: Run benchmarks
        run: |
+          # asv fails to checkout tracker unless we help it a bit:
+          git checkout tracker
+          git checkout main
+          git branch
+
          python3 -m asv run --config $ASV_CONFIG -v
-          ls -l .asv/results
-          echo ">>> results/machine"
-          ls -l .asv/results/${{ matrix.name }}
        env:
          PKG_CONFIG_PATH: ${{ github.workspace }}

      - name: Store/Upload benchmark results
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.name }}
-          path: .asv/results/*
+          path: |
+            .asv/results/*
+            wheel_info
          if-no-files-found: error

+
  combine-and-publish:
    runs-on: ubuntu-latest
    needs: bench
@@ -112,17 +155,33 @@ jobs:
      - name: Collect past runs
        run: |
          git checkout gh-pages
-          mkdir -p .asv/resuls
-          cp -r results .asv/results
+          mkdir -p .asv/results
+          cp -r results .asv

      - name: Combine the runs
        run: |
-          cp -r gh-skylake/* .asv/results
-          cp -r gh-graviton/* .asv/results
-          ls -l
-          ls -l .asv/results
+          # NB artifact names start with gha-
+          artifacts=`ls -d ./$NAME_PREFIX-*`
+          echo "found ARTIFACTS = "$artifacts
+
+          # put the benchmark results to where asv expects them:
+          # .asv/
+          #   results/
+          #     benchmarks.json
+          #     machine-name-1/
+          #       machine.json
+          #       commit_hash-1...json
+          #     machine-name-2/
+          #       machine.json
+          #       commit_hash-1...json
+          for dir in $artifacts; do cp -r $dir/.asv/* .asv; done
+
+          # combine the wheel_info files from the artifacts
+          for dir in $artifacts; do cat $dir/wheel_info >> ./wheel_info; done
+          cat wheel_info
+
+          # return to main to be able to generate the new html report
          git checkout main
-          ls -l

      - name: Set up Python
        uses: actions/setup-python@v4
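The `Combine the runs` loop above relies on each `gha-*` artifact directory unpacking with its own `.asv/results/<machine>/` tree plus a `wheel_info` file, so merging is a plain recursive copy into the shared `.asv/` directory. A minimal sanity check that could be run after the loop (illustrative, not part of the workflow):

    # every machine directory should contain a machine.json plus the per-commit result files
    find .asv/results -maxdepth 2 -name machine.json
    for d in .asv/results/*/; do echo "$d: `ls $d | wc -l` entries"; done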
@@ -131,6 +190,9 @@ jobs:
      - name: Set up ASV and generate the html
        run: |
+          git checkout tracker
+          git checkout main
+
          pip install asv
          asv machine --yes --config $ASV_CONFIG
          asv publish --config $ASV_CONFIG -v
@@ -145,5 +207,5 @@ jobs:
          git checkout gh-pages
          cp -r .asv/results results
          git add results/
-          git commit -am"add results for `git rev-parse origin/gh-pages`"
+          git commit -am"add results for `git rev-parse origin/gh-pages`" -m"`cat wheel_info`"
          git push origin HEAD
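The second `-m` on the final commit attaches the combined `wheel_info` as the commit body, so each `gh-pages` update records which nightly OpenBLAS wheels produced the published numbers. One way to read it back later (illustrative):

    git checkout gh-pages
    git log -1 --format=%B    # subject "add results for <sha>" followed by the wheel_info lines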