
Commit 8bb6902

Merge pull request #34 from ChainSafe/dapplion/run-benchmarks
Polish benchmarks and run in CI
2 parents 39e7344 + 6f80a23

10 files changed: +355 -279 lines


.github/workflows/main.yml (+29)
@@ -86,6 +86,35 @@ jobs:
           path: prebuild/*.node
           if-no-files-found: error
 
+  benchmark:
+    needs: ["build-swig"]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          submodules: true
+      - uses: actions/setup-node@v1
+        with:
+          node-version: 12
+
+      - name: Get SWIG pre-built
+        uses: actions/download-artifact@v2
+        with:
+          name: blst_wrap.cpp
+          path: prebuild/
+
+      - name: Install && Build TS + bindings
+        run: yarn bootstrap
+        env:
+          SWIG_SKIP_RUN: true
+
+      - name: Benchmark
+        run: |
+          node_modules/.bin/ts-node benchmark/blstOps.ts
+          node_modules/.bin/ts-node benchmark/batchVerify.ts
+          node_modules/.bin/ts-node benchmark/multithread.ts
+          node_modules/.bin/ts-node benchmark/multithreadOverhead.ts
+
   publish:
     needs: ["build-swig", "build"]
     if: startsWith(github.ref, 'refs/tags/')

benchmark/batchVerify.ts (+13 -12)
@@ -1,12 +1,14 @@
 import crypto from "crypto";
 import * as bls from "../src/lib";
-import { runBenchmark } from "./runner";
+import { Csv } from "./utils/csv";
+import { BenchmarkRunner } from "./utils/runner";
 
 (async function () {
-  const results: { i: number; serie: number; batch: number }[] = [];
+  const runner = new BenchmarkRunner("Batch verify benchmark");
+  const csv = new Csv<"n" | "serie" | "batch" | "ratio">();
 
   for (let i = 1; i <= 128; i = i * 2) {
-    const serie = await runBenchmark({
+    const serie = await runner.run({
       id: `${i} - BLS verification`,
       before: () => {
         const msg = Buffer.alloc(32, i);
@@ -22,7 +24,7 @@ import { runBenchmark } from "./runner";
       },
     });
 
-    const batch = await runBenchmark({
+    const batch = await runner.run({
      id: `${i} - BLS verification batch`,
       before: () => {
         const msg = Buffer.alloc(32, i);
@@ -36,14 +38,13 @@ import { runBenchmark } from "./runner";
       },
     });
 
-    results.push({ i, serie, batch });
+    csv.addRow({
+      n: i,
+      serie: serie / i,
+      batch: batch / i,
+      ratio: batch / serie,
+    });
   }
 
-  console.log(
-    results
-      .map(({ i, serie, batch }) =>
-        [i, serie / i, batch / i, batch / i / (serie / i)].join(", ")
-      )
-      .join("\n")
-  );
+  csv.logToConsole();
 })();

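The new call sites above import two helpers from benchmark/utils/ that this commit's excerpt does not show: Csv (addRow, logToConsole) and BenchmarkRunner (run). As a rough sketch of the interface those call sites imply — the field names, default iteration count, and timing units below are assumptions for illustration, not the repository's actual implementation — the helpers could look roughly like this:

// Hypothetical sketch only: the real benchmark/utils/{csv,runner}.ts are not shown in this diff.
import { performance } from "perf_hooks";

export class Csv<K extends string> {
  private rows: Record<K, number | string>[] = [];

  addRow(row: Record<K, number | string>): void {
    this.rows.push(row);
  }

  logToConsole(): void {
    if (this.rows.length === 0) return;
    const keys = Object.keys(this.rows[0]) as K[];
    console.log(keys.join(","));
    for (const row of this.rows) console.log(keys.map((k) => row[k]).join(","));
  }
}

export interface BenchmarkOpts<T1, T2 = T1> {
  id: string;
  before: () => T1 | Promise<T1>;
  beforeEach?: (arg: T1) => T2 | Promise<T2>;
  run: (input: T2) => void;
  runs?: number; // assumed default below, not taken from the diff
}

export class BenchmarkRunner {
  constructor(title: string) {
    console.log(title);
  }

  // Times `run` over `runs` iterations and resolves to the average ms per call.
  async run<T1, T2 = T1>(opts: BenchmarkOpts<T1, T2>): Promise<number> {
    const { id, before, beforeEach, run, runs = 100 } = opts;
    const beforeRes = await before();
    let totalMs = 0;
    for (let i = 0; i < runs; i++) {
      const input = beforeEach ? await beforeEach(beforeRes) : ((beforeRes as unknown) as T2);
      const start = performance.now();
      run(input);
      totalMs += performance.now() - start;
    }
    const avgMs = totalMs / runs;
    console.log(`${id}: ${avgMs.toFixed(3)} ms/op`);
    return avgMs;
  }
}

Under this assumed shape, batchVerify.ts's csv.addRow({ n: i, serie: serie / i, batch: batch / i, ratio: batch / serie }) records the per-message cost of serial and batched verification plus their ratio, and csv.logToConsole() prints one CSV row per batch size.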
benchmark/index.ts → benchmark/blstOps.ts (+27 -25)
@@ -14,14 +14,16 @@ import {
   Signature,
   verify,
 } from "../src/lib";
-import { runBenchmark } from "./runner";
+import { BenchmarkRunner } from "./utils/runner";
 
 const dst = "BLS_SIG_BLS12381G2-SHA256-SSWU-RO_POP_";
 const hashOrEncode = true;
 const msg = Buffer.from("Mr F was here");
 
 (async function () {
-  await runBenchmark({
+  const runner = new BenchmarkRunner("BLS opts benchmark");
+
+  await runner.run({
     id: "Scalar multiplication G1 (255-bit, constant-time)",
     before: () => {},
     beforeEach: () => ({
@@ -33,7 +35,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark({
+  await runner.run({
     id: "Scalar multiplication G2 (255-bit, constant-time)",
     before: () => {},
     beforeEach: () => ({
@@ -45,7 +47,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark({
+  await runner.run({
     id: "EC add G1 (constant-time)",
     before: () => {},
     beforeEach: () => {
@@ -57,7 +59,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark({
+  await runner.run({
     id: "EC add G2 (constant-time)",
     before: () => {},
     beforeEach: () => {
@@ -69,7 +71,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark<{ pk: P1_Affine; sig: P2_Affine }, Pairing>({
+  await runner.run<{ pk: P1_Affine; sig: P2_Affine }, Pairing>({
     id: "Pairing (Miller loop + Final Exponentiation)",
     before: () => {
       const sk = new blst.SecretKey();
@@ -99,7 +101,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark({
+  await runner.run({
     id: "Hash to G2 (Draft #9) + affine conversion",
     before: () => {},
     beforeEach: () => new blst.P2(),
@@ -115,42 +117,42 @@ const msg = Buffer.from("Mr F was here");
     { id: "P1", P: blst.P1, p: blst.BLS12_381_G1 },
     { id: "P2", P: blst.P2, p: blst.BLS12_381_G2 },
   ]) {
-    await runBenchmark({
+    await runner.run({
       id: `${id} to_affine`,
       before: () => {},
       beforeEach: () => new P(p),
       run: (p) => p.to_affine(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} to_jacobian`,
       before: () => {},
       beforeEach: () => p.dup(),
       run: (p) => p.to_jacobian(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} compress`,
       before: () => {},
       beforeEach: () => new P(p),
      run: (p) => p.compress(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} serialize`,
       before: () => {},
       beforeEach: () => new P(p),
       run: (p) => p.serialize(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} from compress`,
       before: () => {},
       beforeEach: () => new P(p).compress(),
       run: (bytes) => new P(bytes),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} from serialize`,
       before: () => {},
       beforeEach: () => new P(p).serialize(),
@@ -169,25 +171,25 @@ const msg = Buffer.from("Mr F was here");
     P1_Affine: new blst.P1(sk).to_affine(),
     P2_Affine: new blst.P2(sk).to_affine(),
   })) {
-    await runBenchmark({
+    await runner.run({
       id: `${id} on_curve`,
       before: () => {},
       run: () => p.on_curve(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} in_group`,
       before: () => {},
       run: () => p.in_group(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} is_inf`,
       before: () => {},
       run: () => p.is_inf(),
     });
 
-    await runBenchmark({
+    await runner.run({
       id: `${id} dup`,
       before: () => {},
       run: () => p.dup(),
@@ -206,7 +208,7 @@ const msg = Buffer.from("Mr F was here");
   });
 
   // Fastest than using .dup()
-  await runBenchmark<InstanceType<typeof blst.P1>[]>({
+  await runner.run<InstanceType<typeof blst.P1>[]>({
     id: `BLS aggregate ${aggCount} from P1[] with .add`,
     before: () => sks.map((sk) => new blst.P1(sk)),
     run: (pks) => {
@@ -215,7 +217,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark<InstanceType<typeof blst.P1>[]>({
+  await runner.run<InstanceType<typeof blst.P1>[]>({
     id: `BLS aggregate ${aggCount} from P1[] with .add add .dup first`,
     before: () => sks.map((sk) => new blst.P1(sk)),
     run: (pks) => {
@@ -224,7 +226,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark<InstanceType<typeof blst.P1_Affine>[]>({
+  await runner.run<InstanceType<typeof blst.P1_Affine>[]>({
     id: `BLS aggregate ${aggCount} from P1_Aff[] with .add`,
     before: () => sks.map((sk) => new blst.P1(sk).to_affine()),
     run: (pks) => {
@@ -234,7 +236,7 @@ const msg = Buffer.from("Mr F was here");
   });
 
   // This is way more expensive because .aggregate does a group check on each key
-  await runBenchmark<InstanceType<typeof blst.P1_Affine>[]>({
+  await runner.run<InstanceType<typeof blst.P1_Affine>[]>({
     id: `BLS aggregate ${aggCount} from P1_Aff[] with .aggregate`,
     before: () => sks.map((sk) => new blst.P1(sk).to_affine()),
     run: (pks) => {
@@ -246,7 +248,7 @@ const msg = Buffer.from("Mr F was here");
 
   // BLS lib
 
-  await runBenchmark({
+  await runner.run({
     id: "BLS signature",
     before: () => {},
     beforeEach: () => SecretKey.fromKeygen(crypto.randomBytes(32)),
@@ -255,7 +257,7 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  await runBenchmark<{ pk: PublicKey; sig: Signature }>({
+  await runner.run<{ pk: PublicKey; sig: Signature }>({
     id: "BLS verification",
     before: () => {
       const sk = SecretKey.fromKeygen(crypto.randomBytes(32));
@@ -268,8 +270,8 @@ const msg = Buffer.from("Mr F was here");
     },
   });
 
-  for (const n of [32, 128, 512]) {
-    await runBenchmark<{ pks: PublicKey[]; sig: Signature }>({
+  for (const n of [32, 128]) {
+    await runner.run<{ pks: PublicKey[]; sig: Signature }>({
       id: `BLS agg verif of 1 msg by ${n} pubkeys`,
       before: () => {
        const pks: PublicKey[] = [];
