From 0a2ad07458b3b66bd3bd1924cc871a7c19ec27ef Mon Sep 17 00:00:00 2001
From: Bailey Pearson
Date: Thu, 8 Sep 2022 10:22:41 -0400
Subject: [PATCH] test(NODE-4590): add parallel driver benchmarks (#3389)

---
 test/benchmarks/driverBench/common.js         |  43 ++-
 test/benchmarks/driverBench/index.js          | 363 ++----------------
 .../benchmarks/mongoBench/suites/bsonBench.js |  49 +++
 test/benchmarks/mongoBench/suites/index.js    |  11 +
 .../mongoBench/suites/multiBench.js           | 147 +++++++
 .../mongoBench/suites/parallelBench.js        | 199 ++++++++++
 .../mongoBench/suites/singleBench.js          | 116 ++++++
 7 files changed, 602 insertions(+), 326 deletions(-)
 create mode 100644 test/benchmarks/mongoBench/suites/bsonBench.js
 create mode 100644 test/benchmarks/mongoBench/suites/index.js
 create mode 100644 test/benchmarks/mongoBench/suites/multiBench.js
 create mode 100644 test/benchmarks/mongoBench/suites/parallelBench.js
 create mode 100644 test/benchmarks/mongoBench/suites/singleBench.js

diff --git a/test/benchmarks/driverBench/common.js b/test/benchmarks/driverBench/common.js
index 7612df1bc1..e194ea9187 100644
--- a/test/benchmarks/driverBench/common.js
+++ b/test/benchmarks/driverBench/common.js
@@ -2,9 +2,14 @@
 
 const fs = require('fs');
 const path = require('path');
+const { Readable } = require('stream');
+const { pipeline } = require('stream/promises');
 const { MongoClient } = require('../../..');
 const { GridFSBucket } = require('../../..');
 
+// eslint-disable-next-line no-restricted-modules
+const { MONGODB_ERROR_CODES } = require('../../../lib/error');
+
 const DB_NAME = 'perftest';
 const COLLECTION_NAME = 'corpus';
 
@@ -48,7 +53,11 @@ function initCollection() {
 }
 
 function dropCollection() {
-  return this.collection.drop();
+  return this.collection.drop().catch(e => {
+    if (e.code !== MONGODB_ERROR_CODES.NamespaceNotFound) {
+      throw e;
+    }
+  });
 }
 
 function initBucket() {
@@ -65,6 +74,33 @@ function makeLoadJSON(name) {
   };
 }
 
+function makeLoadTweets(makeId) {
+  return function () {
+    const doc = this.doc;
+    const tweets = [];
+    for (let _id = 1; _id <= 10000; _id += 1) {
+      tweets.push(Object.assign({}, doc, makeId ? { _id } : {}));
+    }
+
+    return this.collection.insertMany(tweets);
+  };
+}
+
+function makeLoadInsertDocs(numberOfOperations) {
+  return function () {
+    this.docs = [];
+    for (let i = 0; i < numberOfOperations; i += 1) {
+      this.docs.push(Object.assign({}, this.doc));
+    }
+  };
+}
+
+async function writeSingleByteFileToBucket() {
+  const stream = this.bucket.openUploadStream('setup-file.txt');
+  const oneByteFile = Readable.from('a');
+  return pipeline(oneByteFile, stream);
+}
+
 module.exports = {
   makeClient,
   connectClient,
@@ -78,5 +114,8 @@
   loadSpecFile,
   loadSpecString,
   initBucket,
-  dropBucket
+  dropBucket,
+  makeLoadTweets,
+  makeLoadInsertDocs,
+  writeSingleByteFileToBucket
 };
diff --git a/test/benchmarks/driverBench/index.js b/test/benchmarks/driverBench/index.js
index 6b206e4087..c798e5d81b 100644
--- a/test/benchmarks/driverBench/index.js
+++ b/test/benchmarks/driverBench/index.js
@@ -1,338 +1,42 @@
 'use strict';
 
 const MongoBench = require('../mongoBench');
-const { writeFile } = require('fs/promises');
 
 const Runner = MongoBench.Runner;
-const commonHelpers = require('./common');
 
-let BSON = require('bson');
+let bsonType = 'js-bson';
+// TODO(NODE-4606): test against different driver configurations in CI
 
-try {
-  BSON = require('bson-ext');
-} catch (_) {
-  // do not care
-}
+let BSON = require('bson');
+// try {
+//   BSON = require('bson-ext');
+//   bsonType = 'bson-ext';
+// } catch (_) {
+//   // do not care
+// }
 
-const { EJSON } = require('bson');
 const { inspect } = require('util');
-
-const makeClient = commonHelpers.makeClient;
-const connectClient = commonHelpers.connectClient;
-const disconnectClient = commonHelpers.disconnectClient;
-const initDb = commonHelpers.initDb;
-const dropDb = commonHelpers.dropDb;
-const createCollection = commonHelpers.createCollection;
-const initCollection = commonHelpers.initCollection;
-const dropCollection = commonHelpers.dropCollection;
-const makeLoadJSON = commonHelpers.makeLoadJSON;
-const loadSpecString = commonHelpers.loadSpecString;
-const loadSpecFile = commonHelpers.loadSpecFile;
-const initBucket = commonHelpers.initBucket;
-const dropBucket = commonHelpers.dropBucket;
+const { writeFile } = require('fs/promises');
+const {
+  makeParallelBenchmarks,
+  makeBsonBench,
+  makeSingleBench,
+  makeMultiBench
+} = require('../mongoBench/suites');
 
 function average(arr) {
   return arr.reduce((x, y) => x + y, 0) / arr.length;
 }
 
-function encodeBSON() {
-  for (let i = 0; i < 10000; i += 1) {
-    BSON.serialize(this.dataString);
-  }
-}
-
-function decodeBSON() {
-  for (let i = 0; i < 10000; i += 1) {
-    BSON.deserialize(this.data);
-  }
-}
-
-function makeBSONLoader(fileName) {
-  return function () {
-    this.dataString = EJSON.parse(loadSpecString(['extended_bson', `${fileName}.json`]));
-    this.data = BSON.serialize(this.dataString);
-  };
-}
-
-function loadGridFs() {
-  this.bin = loadSpecFile(['single_and_multi_document', 'gridfs_large.bin']);
-}
-
-function makeTestInsertOne(numberOfOps) {
-  return function (done) {
-    const loop = _id => {
-      if (_id > numberOfOps) {
-        return done();
-      }
-
-      const doc = Object.assign({}, this.doc);
-
-      this.collection.insertOne(doc, err => (err ? done(err) : loop(_id + 1)));
-    };
-
-    loop(1);
-  };
-}
-
-function makeLoadTweets(makeId) {
-  return function () {
-    const doc = this.doc;
-    const tweets = [];
-    for (let _id = 1; _id <= 10000; _id += 1) {
-      tweets.push(Object.assign({}, doc, makeId ? { _id } : {}));
-    }
-
-    return this.collection.insertMany(tweets);
-  };
-}
-
-function makeLoadInsertDocs(numberOfOperations) {
-  return function () {
-    this.docs = [];
-    for (let i = 0; i < numberOfOperations; i += 1) {
-      this.docs.push(Object.assign({}, this.doc));
-    }
-  };
-}
-
-function findOneById(done) {
-  const loop = _id => {
-    if (_id > 10000) {
-      return done();
-    }
-
-    return this.collection.findOne({ _id }, err => (err ? done(err) : loop(_id + 1)));
-  };
-
-  return loop(1);
-}
-
-function runCommand(done) {
-  const loop = _id => {
-    if (_id > 10000) {
-      return done();
-    }
-    return this.db.command({ hello: true }, err => (err ? done(err) : loop(_id + 1)));
-  };
-
-  return loop(1);
-}
-
-function findManyAndEmptyCursor(done) {
-  return this.collection.find({}).forEach(() => {}, done);
-}
-
-function docBulkInsert(done) {
-  return this.collection.insertMany(this.docs, { ordered: true }, done);
-}
-
-function gridFsInitUploadStream() {
-  this.stream = this.bucket.openUploadStream('gridfstest');
-}
-
-function writeSingleByteToUploadStream() {
-  return new Promise((resolve, reject) => {
-    this.stream.write('\0', null, err => (err ? reject(err) : resolve()));
-  });
-}
-
 const benchmarkRunner = new Runner()
-  .suite('bsonBench', suite =>
-    suite
-      .benchmark('flatBsonEncoding', benchmark =>
-        benchmark.taskSize(75.31).setup(makeBSONLoader('flat_bson')).task(encodeBSON)
-      )
-      .benchmark('flatBsonDecoding', benchmark =>
-        benchmark.taskSize(75.31).setup(makeBSONLoader('flat_bson')).task(decodeBSON)
-      )
-      .benchmark('deepBsonEncoding', benchmark =>
-        benchmark.taskSize(19.64).setup(makeBSONLoader('deep_bson')).task(encodeBSON)
-      )
-      .benchmark('deepBsonDecoding', benchmark =>
-        benchmark.taskSize(19.64).setup(makeBSONLoader('deep_bson')).task(decodeBSON)
-      )
-      .benchmark('fullBsonEncoding', benchmark =>
-        benchmark.taskSize(57.34).setup(makeBSONLoader('full_bson')).task(encodeBSON)
-      )
-      .benchmark('fullBsonDecoding', benchmark =>
-        benchmark.taskSize(57.34).setup(makeBSONLoader('full_bson')).task(decodeBSON)
-      )
-  )
-  .suite('singleBench', suite =>
-    suite
-      .benchmark('runCommand', benchmark =>
-        benchmark
-          .taskSize(0.16)
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .task(runCommand)
-          .teardown(disconnectClient)
-      )
-      .benchmark('findOne', benchmark =>
-        benchmark
-          .taskSize(16.22)
-          .setup(makeLoadJSON('tweet.json'))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initCollection)
-          .setup(makeLoadTweets(true))
-          .task(findOneById)
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('smallDocInsertOne', benchmark =>
-        benchmark
-          .taskSize(2.75)
-          .setup(makeLoadJSON('small_doc.json'))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .setup(createCollection)
-          .beforeTask(dropCollection)
-          .beforeTask(createCollection)
-          .beforeTask(initCollection)
-          .task(makeTestInsertOne(10000))
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('largeDocInsertOne', benchmark =>
-        benchmark
-          .taskSize(27.31)
-          .setup(makeLoadJSON('large_doc.json'))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .setup(createCollection)
-          .beforeTask(dropCollection)
-          .beforeTask(createCollection)
-          .beforeTask(initCollection)
-          .task(makeTestInsertOne(10))
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-  )
-  .suite('multiBench', suite =>
-    suite
-      .benchmark('findManyAndEmptyCursor', benchmark =>
-        benchmark
-          .taskSize(16.22)
-          .setup(makeLoadJSON('tweet.json'))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initCollection)
-          .setup(makeLoadTweets(false))
-          .task(findManyAndEmptyCursor)
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('smallDocBulkInsert', benchmark =>
-        benchmark
-          .taskSize(2.75)
-          .setup(makeLoadJSON('small_doc.json'))
-          .setup(makeLoadInsertDocs(10000))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .setup(createCollection)
-          .beforeTask(dropCollection)
-          .beforeTask(createCollection)
-          .beforeTask(initCollection)
-          .task(docBulkInsert)
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('largeDocBulkInsert', benchmark =>
-        benchmark
-          .taskSize(27.31)
-          .setup(makeLoadJSON('large_doc.json'))
-          .setup(makeLoadInsertDocs(10))
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .setup(createCollection)
-          .beforeTask(dropCollection)
-          .beforeTask(createCollection)
-          .beforeTask(initCollection)
-          .task(docBulkInsert)
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('gridFsUpload', benchmark =>
-        benchmark
-          .taskSize(52.43)
-          .setup(loadGridFs)
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .beforeTask(dropBucket)
-          .beforeTask(initBucket)
-          .beforeTask(gridFsInitUploadStream)
-          .beforeTask(writeSingleByteToUploadStream)
-          .task(function (done) {
-            this.stream.on('error', done).end(this.bin, null, () => done());
-          })
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-      .benchmark('gridFsDownload', benchmark =>
-        benchmark
-          .taskSize(52.43)
-          .setup(loadGridFs)
-          .setup(makeClient)
-          .setup(connectClient)
-          .setup(initDb)
-          .setup(dropDb)
-          .setup(initDb)
-          .setup(initCollection)
-          .setup(dropBucket)
-          .setup(initBucket)
-          .setup(gridFsInitUploadStream)
-          .setup(function () {
-            return new Promise((resolve, reject) => {
-              this.stream.end(this.bin, null, err => {
-                if (err) {
-                  return reject(err);
-                }
-
-                this.id = this.stream.id;
-                this.stream = undefined;
-                resolve();
-              });
-            });
-          })
-          .task(function (done) {
-            this.bucket.openDownloadStream(this.id).resume().on('end', done);
-          })
-          .teardown(dropDb)
-          .teardown(disconnectClient)
-      )
-  );
+  .suite('bsonBench', suite => makeBsonBench({ suite, BSON }))
+  .suite('singleBench', suite => makeSingleBench(suite))
+  .suite('multiBench', suite => makeMultiBench(suite))
+  .suite('parallel', suite => makeParallelBenchmarks(suite));
 
 benchmarkRunner
   .run()
   .then(microBench => {
-    // TODO - test against different BSON versions in CI
-    // const bsonType = BSON.serialize.toString().includes('native code') ? 'bson-ext' : 'js-bson';
     const bsonBench = average(Object.values(microBench.bsonBench));
     const singleBench = average([
       microBench.singleBench.findOne,
@@ -341,23 +45,32 @@ benchmarkRunner
     ]);
     const multiBench = average(Object.values(microBench.multiBench));
 
-    // TODO: add parallelBench
-    const parallelBench = NaN;
+    const parallelBench = average([
+      microBench.parallel.ldjsonMultiFileUpload,
+      microBench.parallel.ldjsonMultiFileExport,
+      microBench.parallel.gridfsMultiFileUpload,
+      microBench.parallel.gridfsMultiFileDownload
+    ]);
+
     const readBench = average([
       microBench.singleBench.findOne,
       microBench.multiBench.findManyAndEmptyCursor,
-      microBench.multiBench.gridFsDownload
-      // TODO: Add parallelBench read benchmarks
+      microBench.multiBench.gridFsDownload,
+      microBench.parallel.gridfsMultiFileDownload,
+      microBench.parallel.ldjsonMultiFileExport
     ]);
     const writeBench = average([
       microBench.singleBench.smallDocInsertOne,
       microBench.singleBench.largeDocInsertOne,
       microBench.multiBench.smallDocBulkInsert,
       microBench.multiBench.largeDocBulkInsert,
-      microBench.multiBench.gridFsUpload
-      // TODO: Add parallelBench write benchmarks
+      microBench.multiBench.gridFsUpload,
+      microBench.parallel.ldjsonMultiFileUpload,
+      microBench.parallel.gridfsMultiFileUpload
     ]);
+
+    const driverBench = average([readBench, writeBench]);
+
     const benchmarkResults = {
       bsonBench,
       singleBench,
@@ -365,6 +78,7 @@ benchmarkRunner
       parallelBench,
       readBench,
       writeBench,
+      driverBench,
       ...microBench.bsonBench,
       ...microBench.singleBench,
       ...microBench.multiBench
@@ -373,7 +87,8 @@ benchmarkRunner
     return Object.entries(benchmarkResults).map(([benchmarkName, result]) => {
       return {
         info: {
-          test_name: benchmarkName
+          test_name: benchmarkName,
+          tags: [bsonType]
         },
         metrics: [{ name: 'megabytes_per_second', value: result }]
       };
@@ -381,7 +96,7 @@ benchmarkRunner
   })
   .then(data => {
     const results = JSON.stringify(data, undefined, 2);
-    console.error(inspect(results, { depth: Infinity }));
+    console.log(inspect(data, { depth: Infinity, colors: true }));
     return writeFile('results.json', results);
   })
   .catch(err => console.error(err));
diff --git a/test/benchmarks/mongoBench/suites/bsonBench.js b/test/benchmarks/mongoBench/suites/bsonBench.js
new file mode 100644
index 0000000000..3abbb3034a
--- /dev/null
+++ b/test/benchmarks/mongoBench/suites/bsonBench.js
@@ -0,0 +1,49 @@
+const { EJSON } = require('bson');
+const { loadSpecString } = require('../../driverBench/common');
+
+/**
+ *
+ * @param {{ suite: Suite, BSON: BSON }} options
+ * @returns {Suite}
+ */
+function makeBsonBench({ suite, BSON }) {
+  function encodeBSON() {
+    for (let i = 0; i < 10000; i += 1) {
+      BSON.serialize(this.dataString);
+    }
+  }
+
+  function decodeBSON() {
+    for (let i = 0; i < 10000; i += 1) {
+      BSON.deserialize(this.data);
+    }
+  }
+
+  function makeBSONLoader(fileName) {
+    return function () {
+      this.dataString = EJSON.parse(loadSpecString(['extended_bson', `${fileName}.json`]));
+      this.data = BSON.serialize(this.dataString);
+    };
+  }
+  return suite
+    .benchmark('flatBsonEncoding', benchmark =>
+      benchmark.taskSize(75.31).setup(makeBSONLoader('flat_bson')).task(encodeBSON)
+    )
+    .benchmark('flatBsonDecoding', benchmark =>
+      benchmark.taskSize(75.31).setup(makeBSONLoader('flat_bson')).task(decodeBSON)
+    )
+    .benchmark('deepBsonEncoding', benchmark =>
+      benchmark.taskSize(19.64).setup(makeBSONLoader('deep_bson')).task(encodeBSON)
+    )
+    .benchmark('deepBsonDecoding', benchmark =>
+      benchmark.taskSize(19.64).setup(makeBSONLoader('deep_bson')).task(decodeBSON)
+    )
+    .benchmark('fullBsonEncoding', benchmark =>
+      benchmark.taskSize(57.34).setup(makeBSONLoader('full_bson')).task(encodeBSON)
+    )
+    .benchmark('fullBsonDecoding', benchmark =>
+      benchmark.taskSize(57.34).setup(makeBSONLoader('full_bson')).task(decodeBSON)
+    );
+}
+
+module.exports = { makeBsonBench };
diff --git a/test/benchmarks/mongoBench/suites/index.js b/test/benchmarks/mongoBench/suites/index.js
new file mode 100644
index 0000000000..bde03c4b1d
--- /dev/null
+++ b/test/benchmarks/mongoBench/suites/index.js
@@ -0,0 +1,11 @@
+const { makeParallelBenchmarks } = require('./parallelBench');
+const { makeBsonBench } = require('./bsonBench');
+const { makeSingleBench } = require('./singleBench');
+const { makeMultiBench } = require('./multiBench');
+
+module.exports = {
+  makeParallelBenchmarks,
+  makeBsonBench,
+  makeSingleBench,
+  makeMultiBench
+};
diff --git a/test/benchmarks/mongoBench/suites/multiBench.js b/test/benchmarks/mongoBench/suites/multiBench.js
new file mode 100644
index 0000000000..d247d290d4
--- /dev/null
+++ b/test/benchmarks/mongoBench/suites/multiBench.js
@@ -0,0 +1,147 @@
+const {
+  loadSpecFile,
+  makeLoadJSON,
+  makeClient,
+  connectClient,
+  initDb,
+  dropDb,
+  initCollection,
+  makeLoadTweets,
+  disconnectClient,
+  makeLoadInsertDocs,
+  createCollection,
+  dropCollection,
+  dropBucket,
+  initBucket
+} = require('../../driverBench/common');
+
+function loadGridFs() {
+  this.bin = loadSpecFile(['single_and_multi_document', 'gridfs_large.bin']);
+}
+
+function findManyAndEmptyCursor(done) {
+  return this.collection.find({}).forEach(() => {}, done);
+}
+
+function docBulkInsert(done) {
+  return this.collection.insertMany(this.docs, { ordered: true }, done);
+}
+
+function gridFsInitUploadStream() {
+  this.stream = this.bucket.openUploadStream('gridfstest');
+}
+
+function writeSingleByteToUploadStream() {
+  return new Promise((resolve, reject) => {
+    this.stream.write('\0', null, err => (err ? reject(err) : resolve()));
+  });
+}
+function makeMultiBench(suite) {
+  return suite
+    .benchmark('findManyAndEmptyCursor', benchmark =>
+      benchmark
+        .taskSize(16.22)
+        .setup(makeLoadJSON('tweet.json'))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initCollection)
+        .setup(makeLoadTweets(false))
+        .task(findManyAndEmptyCursor)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('smallDocBulkInsert', benchmark =>
+      benchmark
+        .taskSize(2.75)
+        .setup(makeLoadJSON('small_doc.json'))
+        .setup(makeLoadInsertDocs(10000))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(createCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .beforeTask(initCollection)
+        .task(docBulkInsert)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('largeDocBulkInsert', benchmark =>
+      benchmark
+        .taskSize(27.31)
+        .setup(makeLoadJSON('large_doc.json'))
+        .setup(makeLoadInsertDocs(10))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(createCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .beforeTask(initCollection)
+        .task(docBulkInsert)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('gridFsUpload', benchmark =>
+      benchmark
+        .taskSize(52.43)
+        .setup(loadGridFs)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .beforeTask(dropBucket)
+        .beforeTask(initBucket)
+        .beforeTask(gridFsInitUploadStream)
+        .beforeTask(writeSingleByteToUploadStream)
+        .task(function (done) {
+          this.stream.on('error', done).end(this.bin, null, () => done());
+        })
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('gridFsDownload', benchmark =>
+      benchmark
+        .taskSize(52.43)
+        .setup(loadGridFs)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(dropBucket)
+        .setup(initBucket)
+        .setup(gridFsInitUploadStream)
+        .setup(function () {
+          return new Promise((resolve, reject) => {
+            this.stream.end(this.bin, null, err => {
+              if (err) {
+                return reject(err);
+              }
+
+              this.id = this.stream.id;
+              this.stream = undefined;
+              resolve();
+            });
+          });
+        })
+        .task(function (done) {
+          this.bucket.openDownloadStream(this.id).resume().on('end', done);
+        })
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    );
+}
+
+module.exports = { makeMultiBench };
diff --git a/test/benchmarks/mongoBench/suites/parallelBench.js b/test/benchmarks/mongoBench/suites/parallelBench.js
new file mode 100644
index 0000000000..1a2a0d2a54
--- /dev/null
+++ b/test/benchmarks/mongoBench/suites/parallelBench.js
@@ -0,0 +1,199 @@
+'use strict';
+
+const { createReadStream, createWriteStream } = require('fs');
+const { rm, mkdir, readdir } = require('fs/promises');
+const { resolve } = require('path');
+const { Readable } = require('stream');
+const readline = require('readline');
+const {
+  makeClient,
+  disconnectClient,
+  dropDb,
+  initBucket,
+  dropBucket,
+  initCollection,
+  initDb,
+  connectClient,
+  createCollection,
+  dropCollection
+} = require('../../driverBench/common');
+const { pipeline } = require('stream/promises');
+const { EJSON } = require('bson');
+
+const benchmarkFileDirectory = resolve(__dirname, '..', '..', 'driverBench', 'spec', 'parallel');
+
+async function initTemporaryDirectory() {
+  const temporaryDirectory = resolve(benchmarkFileDirectory, 'downloads');
+  await rm(temporaryDirectory, { recursive: true, force: true });
+  await mkdir(temporaryDirectory);
+  this.temporaryDirectory = temporaryDirectory;
+}
+
+async function clearTemporaryDirectory() {
+  const fileNames = await readdir(this.temporaryDirectory);
+  const files = fileNames.map(filename => resolve(this.temporaryDirectory, filename));
+
+  await Promise.all(files.map(file => rm(file)));
+}
+
+async function ldjsonMultiUpload() {
+  const directory = resolve(benchmarkFileDirectory, 'ldjson_multi');
+  const files = await readdir(directory);
+  const uploads = files.map(async file => {
+    const stream = createReadStream(resolve(directory, file));
+    const lineReader = readline.createInterface({
+      input: stream
+    });
+
+    const operations = [];
+
+    for await (const line of lineReader) {
+      operations.push({
+        insertOne: {
+          document: JSON.parse(line)
+        }
+      });
+    }
+
+    stream.close();
+    lineReader.close();
+
+    return this.collection.bulkWrite(operations);
+  });
+
+  await Promise.all(uploads);
+}
+
+async function ldjsonMultiExport() {
+  const skips = Array.from({ length: 100 }, (_, index) => index * 5000);
+
+  const promises = skips.map(async skip => {
+    const documentCursor = this.collection.find({}, { skip, limit: 5000 });
+    documentCursor.map(doc => EJSON.stringify(doc));
+    const outputStream = createWriteStream(resolve(this.temporaryDirectory, `tmp-${skip}.txt`));
+    return pipeline(documentCursor.stream(), outputStream);
+  });
+
+  await Promise.all(promises);
+}
+
+async function gridfsMultiFileUpload() {
+  const directory = resolve(benchmarkFileDirectory, 'gridfs_multi');
+  const files = await readdir(directory);
+  const uploadPromises = files.map(async filename => {
+    const file = resolve(directory, filename);
+    const fileStream = createReadStream(file);
+    const uploadStream = this.bucket.openUploadStream(file);
+    return pipeline(fileStream, uploadStream);
+  });
+  await Promise.all(uploadPromises);
+}
+
+async function gridfsMultiFileDownload() {
+  const files = await this.bucket
+    .find()
+    .map(({ _id }) => ({
+      path: resolve(this.temporaryDirectory, `${_id}.txt`),
+      _id
+    }))
+    .toArray();
+
+  const downloads = files.map(async ({ _id, path }) => {
+    const fileStream = createWriteStream(path);
+    const downloadStream = this.bucket.openDownloadStream(_id);
+    return pipeline(downloadStream, fileStream);
+  });
+
+  await Promise.all(downloads);
+}
+
+/**
+ *
+ * @param {Suite} suite
+ * @returns {Suite}
+ */
+function makeParallelBenchmarks(suite) {
+  return suite
+    .benchmark('ldjsonMultiFileUpload', benchmark =>
+      // https://github.com/mongodb/specifications/blob/master/source/benchmarking/benchmarking.rst#ldjson-multi-file-import
+      benchmark
+        .taskSize(565)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .beforeTask(initCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .task(ldjsonMultiUpload)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('ldjsonMultiFileExport', benchmark =>
+      // https://github.com/mongodb/specifications/blob/master/source/benchmarking/benchmarking.rst#ldjson-multi-file-export
+      benchmark
+        .taskSize(565)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .beforeTask(initCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .beforeTask(ldjsonMultiUpload)
+        .beforeTask(initTemporaryDirectory)
+        .task(ldjsonMultiExport)
+        .afterTask(clearTemporaryDirectory)
+        .teardown(dropDb)
+        .teardown(async function () {
+          await rm(this.temporaryDirectory, { recursive: true, force: true });
+        })
+        .teardown(disconnectClient)
+    )
+    .benchmark('gridfsMultiFileUpload', benchmark =>
+      // https://github.com/mongodb/specifications/blob/master/source/benchmarking/benchmarking.rst#gridfs-multi-file-upload
+      benchmark
+        .taskSize(262.144)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .beforeTask(dropBucket)
+        .beforeTask(initBucket)
+        .beforeTask(async function () {
+          const stream = this.bucket.openUploadStream('setup-file.txt');
+          const oneByteFile = Readable.from('a');
+          return pipeline(oneByteFile, stream);
+        })
+        .task(gridfsMultiFileUpload)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('gridfsMultiFileDownload', benchmark =>
+      // https://github.com/mongodb/specifications/blob/master/source/benchmarking/benchmarking.rst#gridfs-multi-file-download
+      benchmark
+        .taskSize(262.144)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(initTemporaryDirectory)
+        .setup(dropBucket)
+        .setup(initBucket)
+        .setup(gridfsMultiFileUpload)
+        .beforeTask(clearTemporaryDirectory)
+        .setup(initBucket)
+        .task(gridfsMultiFileDownload)
+        .teardown(dropDb)
+        .teardown(async function () {
+          await rm(this.temporaryDirectory, { recursive: true, force: true });
+        })
+        .teardown(disconnectClient)
+    );
+}
+
+module.exports = { makeParallelBenchmarks };
diff --git a/test/benchmarks/mongoBench/suites/singleBench.js b/test/benchmarks/mongoBench/suites/singleBench.js
new file mode 100644
index 0000000000..60b9c30795
--- /dev/null
+++ b/test/benchmarks/mongoBench/suites/singleBench.js
@@ -0,0 +1,116 @@
+const {
+  makeClient,
+  connectClient,
+  initDb,
+  disconnectClient,
+  dropDb,
+  initCollection,
+  createCollection,
+  dropCollection,
+  makeLoadJSON,
+  makeLoadTweets
+} = require('../../driverBench/common');
+
+function makeTestInsertOne(numberOfOps) {
+  return function (done) {
+    const loop = _id => {
+      if (_id > numberOfOps) {
+        return done();
+      }
+
+      const doc = Object.assign({}, this.doc);
+
+      this.collection.insertOne(doc, err => (err ? done(err) : loop(_id + 1)));
+    };
+
+    loop(1);
+  };
+}
+
+function findOneById(done) {
+  const loop = _id => {
+    if (_id > 10000) {
+      return done();
+    }
+
+    return this.collection.findOne({ _id }, err => (err ? done(err) : loop(_id + 1)));
+  };
+
+  return loop(1);
+}
+
+function runCommand(done) {
+  const loop = _id => {
+    if (_id > 10000) {
+      return done();
+    }
+    return this.db.command({ hello: true }, err => (err ? done(err) : loop(_id + 1)));
+  };
+
+  return loop(1);
+}
+
+function makeSingleBench(suite) {
+  return suite
+    .benchmark('runCommand', benchmark =>
+      benchmark
+        .taskSize(0.16)
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .task(runCommand)
+        .teardown(disconnectClient)
+    )
+    .benchmark('findOne', benchmark =>
+      benchmark
+        .taskSize(16.22)
+        .setup(makeLoadJSON('tweet.json'))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initCollection)
+        .setup(makeLoadTweets(true))
+        .task(findOneById)
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('smallDocInsertOne', benchmark =>
+      benchmark
+        .taskSize(2.75)
+        .setup(makeLoadJSON('small_doc.json'))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(createCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .beforeTask(initCollection)
+        .task(makeTestInsertOne(10000))
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    )
+    .benchmark('largeDocInsertOne', benchmark =>
+      benchmark
+        .taskSize(27.31)
+        .setup(makeLoadJSON('large_doc.json'))
+        .setup(makeClient)
+        .setup(connectClient)
+        .setup(initDb)
+        .setup(dropDb)
+        .setup(initDb)
+        .setup(initCollection)
+        .setup(createCollection)
+        .beforeTask(dropCollection)
+        .beforeTask(createCollection)
+        .beforeTask(initCollection)
+        .task(makeTestInsertOne(10))
+        .teardown(dropDb)
+        .teardown(disconnectClient)
+    );
+}
+
+module.exports = { makeSingleBench };