chore(NODE-6720): migrate multibench tests (#4399)
nbbeeken authored Feb 6, 2025
1 parent a1c83de commit 198fb72
Showing 10 changed files with 284 additions and 4 deletions.
2 changes: 1 addition & 1 deletion .evergreen/config.yml
@@ -3323,7 +3323,7 @@ tasks:
tags:
- run-spec-benchmark-tests
- performance
exec_timeout_secs: 3600
exec_timeout_secs: 18000
commands:
- command: expansions.update
type: setup
2 changes: 1 addition & 1 deletion .evergreen/generate_evergreen_tasks.js
@@ -756,7 +756,7 @@ function addPerformanceTasks() {
const makePerfTaskNEW = (name, MONGODB_CLIENT_OPTIONS) => ({
name,
tags: ['run-spec-benchmark-tests', 'performance'],
exec_timeout_secs: 3600,
exec_timeout_secs: 18000,
commands: [
updateExpansions({
NODE_LTS_VERSION: 'v22.11.0',
65 changes: 65 additions & 0 deletions test/benchmarks/driver_bench/readme.md
@@ -0,0 +1,65 @@
# Node.js Driver Benchmarks

Set up the driver for development (`npm ci` in the top level of this repo).

Then:

```sh
npm start
```

will build the benchmarks and run them.

## Environment Configuration and Setup

The benchmarks respond to a few environment variables; an example invocation follows the list:

- `MONGODB_URI`
  - The connection string to run operations against.
    CI uses a standalone server; you should be able to launch any cluster and point the benchmarks at it via this environment variable.
  - default: `"mongodb://localhost:27017"`
- `MONGODB_DRIVER_PATH`
  - The path to the MongoDB Node.js driver.
    This MUST be set to the _directory_ the driver is installed in, **NOT** the file `lib/index.js` that is the driver's export.
  - default: four directories above `driver.mjs` (should be the root of this repo)
- `MONGODB_CLIENT_OPTIONS`
  - A JSON string that will be passed to the `MongoClient` constructor.
  - default: `"{}"`

## Running individual benchmarks

`main.mjs` loops over the benchmarks and launches the bench runner for you.

You can launch `runner.mjs` directly and tell it which benchmark to run.

```sh
node lib/runner.mjs suites/multi_bench/grid_fs_upload.mjs
```

## Writing your own benchmark

In the `suites` directory you can add a new suite folder or add a new `.mts` file to an existing one.

A benchmark must export the following:

```ts
type BenchmarkModule = {
taskSize: number;
before?: () => Promise<void>;
beforeEach?: () => Promise<void>;
run: () => Promise<void>;
afterEach?: () => Promise<void>;
after?: () => Promise<void>;
};
```

Just like mocha, we have `before` and `after` hooks that run once, as well as `beforeEach` and `afterEach` hooks that run around each execution of `run`.

The `driver.mts` module is intended to hold various helpers for setup and teardown, and to abstract some of the driver API.
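As a point of reference, here is a minimal sketch of what a new benchmark module could look like. It reuses the `driver` helpers that the suites in this commit use; the file itself, the `ping` workload, and the `taskSize` value are illustrative placeholders rather than a real suite:

```ts
import { driver, type mongodb } from '../../driver.mjs';

// Placeholder task size; real suites set this to the size of the data they operate on.
export const taskSize = 1;

let db: mongodb.Db;

export async function before() {
  // One-time setup: recreate the benchmark database/collection via the driver helpers.
  await driver.drop();
  await driver.create();

  db = driver.db;
}

export async function run() {
  // The timed operation. A trivial ping stands in for a real workload here.
  await db.command({ ping: 1 });
}

export async function after() {
  // One-time teardown.
  await driver.drop();
  await driver.close();
}
```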

## Wishlist

- Make the runner accept a path like `./lib/suites/multi_bench/grid_fs_upload.mjs` as an argument, so shell path autocompletion makes it easier to pick a benchmark
- Make `main.mjs` accept a filter of some kind to run only a subset of the benchmarks
- TBD
7 changes: 5 additions & 2 deletions test/benchmarks/driver_bench/src/driver.mts
@@ -88,8 +88,11 @@ export const MONGODB_CLIENT_OPTIONS = (() => {
})();

export const MONGODB_URI = (() => {
if (process.env.MONGODB_URI?.length) return process.env.MONGODB_URI;
return 'mongodb://127.0.0.1:27017';
const connectionString = process.env.MONGODB_URI;
if (connectionString?.length) {
return connectionString;
}
return 'mongodb://localhost:27017';
})();

export function snakeToCamel(name: string) {
@@ -0,0 +1,38 @@
import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 16;

let db: mongodb.Db;

export async function before() {
await driver.drop();
await driver.create();

db = driver.db;
}

export async function run() {
await db
.aggregate([
{ $documents: [{}] },
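// Doubling the one-element seed array 20 times with $concatArrays yields
// roughly 2^20 elements, which $unwind then fans out into up to 1,000,000 documents.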
{
$set: {
field: {
$reduce: {
input: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
initialValue: [0],
in: { $concatArrays: ['$$value', '$$value'] }
}
}
}
},
{ $unwind: '$field' },
{ $limit: 1000000 }
])
.toArray();
}

export async function after() {
await driver.drop();
await driver.close();
}
@@ -0,0 +1,41 @@
import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 1500;

let db: mongodb.Db;
let tweet: Record<string, any>;

export async function before() {
await driver.drop();
await driver.create();

tweet = await driver.load('single_and_multi_document/tweet.json', 'json');

db = driver.db;
}

export async function run() {
await db
.aggregate([
{ $documents: [tweet] },
{
$set: {
field: {
$reduce: {
input: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
initialValue: [0],
in: { $concatArrays: ['$$value', '$$value'] }
}
}
}
},
{ $unwind: '$field' },
{ $limit: 1000000 }
])
.toArray();
}

export async function after() {
await driver.drop();
await driver.close();
}
@@ -0,0 +1,24 @@
import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 16.22;

let collection: mongodb.Collection;

export async function before() {
await driver.drop();
await driver.create();

const tweet = await driver.load('single_and_multi_document/tweet.json', 'json');
await driver.insertManyOf(tweet, 10000);

collection = driver.collection;
}

export async function run() {
await collection.find({}).toArray();
}

export async function after() {
await driver.drop();
await driver.close();
}
@@ -0,0 +1,39 @@
import { Readable, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 52.43;

let bucket: mongodb.GridFSBucket;
let bin: Uint8Array;
let _id: mongodb.ObjectId;
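// A throwaway Writable sink: every chunk is immediately discarded, so the
// download stream can be fully consumed without storing the data anywhere.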
const devNull = () => new Writable({ write: (_, __, callback) => callback() });

export async function before() {
bin = await driver.load('single_and_multi_document/gridfs_large.bin', 'buffer');

await driver.drop();
await driver.create();

bucket = driver.bucket;

await bucket.drop().catch(() => null);

// Create the bucket.
const stream = bucket.openUploadStream('gridfstest');
const largeBin = Readable.from(bin);
await pipeline(largeBin, stream);

_id = stream.id;
}

export async function run() {
const downloadStream = bucket.openDownloadStream(_id);
await pipeline(downloadStream, devNull());
}

export async function after() {
await driver.drop();
await driver.close();
}
@@ -0,0 +1,40 @@
import { Readable } from 'node:stream';
import { pipeline } from 'node:stream/promises';

import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 52.43;

let bucket: mongodb.GridFSBucket;
let uploadStream: mongodb.GridFSBucketWriteStream;
let bin: Uint8Array;

export async function before() {
bin = await driver.load('single_and_multi_document/gridfs_large.bin', 'buffer');

await driver.drop();
await driver.create();

bucket = driver.bucket;

await bucket.drop().catch(() => null);
}

export async function beforeEach() {
uploadStream = bucket.openUploadStream('gridfstest');

// Create the bucket.
const stream = bucket.openUploadStream('setup-file.txt');
const oneByteFile = Readable.from('a');
await pipeline(oneByteFile, stream);
}

export async function run() {
const uploadData = Readable.from(bin);
await pipeline(uploadData, uploadStream);
}

export async function after() {
await driver.drop();
await driver.close();
}
@@ -0,0 +1,30 @@
import { driver, type mongodb } from '../../driver.mjs';

export const taskSize = 27.31;

let collection: mongodb.Collection;
let documents: any[];
let largeDoc: any;

export async function before() {
largeDoc = await driver.load('single_and_multi_document/large_doc.json', 'json');
}

export async function beforeEach() {
await driver.drop();
await driver.create();

// Make new "documents" so the _id field is not carried over from the last run
documents = Array.from({ length: 10 }, () => ({ ...largeDoc })) as any[];

collection = driver.collection;
}

export async function run() {
await collection.insertMany(documents, { ordered: true });
}

export async function after() {
await driver.drop();
await driver.close();
}
