diff --git a/etc/notes/CHANGES_5.0.0.md b/etc/notes/CHANGES_5.0.0.md index f90cebc250..0d907c8eb7 100644 --- a/etc/notes/CHANGES_5.0.0.md +++ b/etc/notes/CHANGES_5.0.0.md @@ -18,7 +18,7 @@ The following is a detailed collection of the changes in the major v5 release of ### Dot Notation Typescript Support Removed By Default -**NOTE** This is a **Typescript compile-time only** change. Dot notation in filters sent to MongoDB will still work the same. +**NOTE** This is a **Typescript compile-time only** change. Dot notation in filters sent to MongoDB will still work the same. Version 4.3.0 introduced Typescript support for dot notation in filter predicates. For example: diff --git a/package-lock.json b/package-lock.json index 4e0220d4e4..7ea96db49a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "4.13.0", "license": "Apache-2.0", "dependencies": { - "bson": "^4.7.0", + "bson": "^5.0.0-alpha.3", "mongodb-connection-string-url": "^2.6.0", "socks": "^2.7.1" }, @@ -3007,6 +3007,7 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, "funding": [ { "type": "github", @@ -3133,20 +3134,18 @@ } }, "node_modules/bson": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.0.tgz", - "integrity": "sha512-VrlEE4vuiO1WTpfof4VmaVolCVYkYTgB9iWgYNOrVlnifpME/06fhFRmONgBhClD5pFC1t9ZWqFUQEQAzY43bA==", - "dependencies": { - "buffer": "^5.6.0" - }, + "version": "5.0.0-alpha.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-5.0.0-alpha.3.tgz", + "integrity": "sha512-bxrSDfyd4NRL38sD72SRsQp00U2oW2fDuN+GiZfsXzZYzSMABWId2U/scSrLAjrMEmBno60BDAdzO/O1q+5rfg==", "engines": { - "node": ">=6.9.0" + "node": ">=14.20.1" } }, "node_modules/buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, "funding": [ { "type": "github", @@ -5504,6 +5503,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, "funding": [ { "type": "github", @@ -6876,6 +6876,18 @@ "node": ">=12.9.0" } }, + "node_modules/mongodb/node_modules/bson": { + "version": "4.7.2", + "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.2.tgz", + "integrity": "sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==", + "dev": true, + "dependencies": { + "buffer": "^5.6.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -11875,7 +11887,8 @@ "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true }, "binary-extensions": { "version": "2.2.0", @@ -11964,17 +11977,15 @@ } }, "bson": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.0.tgz", - "integrity": 
"sha512-VrlEE4vuiO1WTpfof4VmaVolCVYkYTgB9iWgYNOrVlnifpME/06fhFRmONgBhClD5pFC1t9ZWqFUQEQAzY43bA==", - "requires": { - "buffer": "^5.6.0" - } + "version": "5.0.0-alpha.3", + "resolved": "https://registry.npmjs.org/bson/-/bson-5.0.0-alpha.3.tgz", + "integrity": "sha512-bxrSDfyd4NRL38sD72SRsQp00U2oW2fDuN+GiZfsXzZYzSMABWId2U/scSrLAjrMEmBno60BDAdzO/O1q+5rfg==" }, "buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, "requires": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -13752,7 +13763,8 @@ "ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true }, "ignore": { "version": "5.2.0", @@ -14743,6 +14755,17 @@ "mongodb-connection-string-url": "^2.5.4", "saslprep": "^1.0.3", "socks": "^2.7.1" + }, + "dependencies": { + "bson": { + "version": "4.7.2", + "resolved": "https://registry.npmjs.org/bson/-/bson-4.7.2.tgz", + "integrity": "sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==", + "dev": true, + "requires": { + "buffer": "^5.6.0" + } + } } }, "mongodb-connection-string-url": { diff --git a/package.json b/package.json index e8a7de2975..f29a4dee0c 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "email": "dbx-node@mongodb.com" }, "dependencies": { - "bson": "^4.7.0", + "bson": "^5.0.0-alpha.3", "mongodb-connection-string-url": "^2.6.0", "socks": "^2.7.1" }, diff --git a/src/bson.ts b/src/bson.ts index 4c3d865741..8ef9bc2a82 100644 --- a/src/bson.ts +++ b/src/bson.ts @@ -2,8 +2,10 @@ import type { DeserializeOptions, SerializeOptions } from 'bson'; export { Binary, + BSON, BSONRegExp, BSONSymbol, + BSONType, calculateObjectSize, Code, DBRef, @@ -13,7 +15,6 @@ export { Double, Int32, Long, - Map, MaxKey, MinKey, ObjectId, @@ -21,13 +22,6 @@ export { Timestamp } from 'bson'; -// TODO(NODE-4867): fix with bson v5 -/** @internal */ -// eslint-disable-next-line @typescript-eslint/no-var-requires -const BSON = require('bson'); - -export { BSON }; - /** * BSON Serialization options. 
* @public @@ -42,6 +36,7 @@ export interface BSONSerializeOptions | 'allowObjectSmallerThanBufferSize' | 'index' | 'validation' + | 'useBigInt64' > { /** * Enabling the raw option will return a [Node.js Buffer](https://nodejs.org/api/buffer.html) diff --git a/src/cmap/auth/mongodb_aws.ts b/src/cmap/auth/mongodb_aws.ts index fc7f8bef8f..2585e688d0 100644 --- a/src/cmap/auth/mongodb_aws.ts +++ b/src/cmap/auth/mongodb_aws.ts @@ -11,7 +11,7 @@ import { MongoMissingCredentialsError, MongoRuntimeError } from '../../error'; -import { Callback, maxWireVersion, ns } from '../../utils'; +import { ByteUtils, Callback, maxWireVersion, ns } from '../../utils'; import { AuthContext, AuthProvider } from './auth_provider'; import { MongoCredentials } from './mongo_credentials'; import { AuthMechanism } from './providers'; @@ -108,7 +108,8 @@ export class MongoDBAWS extends AuthProvider { return; } - if (serverNonce.compare(nonce, 0, nonce.length, 0, nonce.length) !== 0) { + // TODO(NODE-4990) + if (!ByteUtils.equals(serverNonce.subarray(0, nonce.byteLength), nonce)) { // TODO(NODE-3483) callback(new MongoRuntimeError('Server nonce does not begin with client nonce')); return; @@ -130,7 +131,7 @@ export class MongoDBAWS extends AuthProvider { headers: { 'Content-Type': 'application/x-www-form-urlencoded', 'Content-Length': body.length, - 'X-MongoDB-Server-Nonce': serverNonce.toString('base64'), + 'X-MongoDB-Server-Nonce': ByteUtils.toBase64(serverNonce), 'X-MongoDB-GS2-CB-Flag': 'n' }, path: '/', diff --git a/src/cmap/commands.ts b/src/cmap/commands.ts index 827fba1896..fa822aaac5 100644 --- a/src/cmap/commands.ts +++ b/src/cmap/commands.ts @@ -136,7 +136,7 @@ export class Query { } // Uses a single allocated buffer for the process, avoiding multiple memory allocations - toBin(): Buffer[] { + toBin(): Uint8Array[] { const buffers = []; let projection = null; @@ -550,7 +550,7 @@ export class Msg { return buffers; } - makeDocumentSegment(buffers: Buffer[], document: Document): number { + makeDocumentSegment(buffers: Uint8Array[], document: Document): number { const payloadTypeBuffer = Buffer.alloc(1); payloadTypeBuffer[0] = 0; @@ -561,7 +561,7 @@ export class Msg { return payloadTypeBuffer.length + documentBuffer.length; } - serializeBson(document: Document): Buffer { + serializeBson(document: Document): Uint8Array { return BSON.serialize(document, { checkKeys: this.checkKeys, serializeFunctions: this.serializeFunctions, diff --git a/src/index.ts b/src/index.ts index dadb64da5f..946d5fd03f 100644 --- a/src/index.ts +++ b/src/index.ts @@ -17,19 +17,19 @@ import { MongoClient } from './mongo_client'; import { CancellationToken } from './mongo_types'; import { ClientSession } from './sessions'; -/** @internal */ +/** @public */ export { BSON } from './bson'; export { Binary, BSONRegExp, BSONSymbol, + BSONType, Code, DBRef, Decimal128, Double, Int32, Long, - Map, MaxKey, MinKey, ObjectId, @@ -103,7 +103,6 @@ export { MongoErrorLabel } from './error'; export { ExplainVerbosity } from './explain'; export { LoggerLevel } from './logger'; export { ServerApiVersion } from './mongo_client'; -export { BSONType } from './mongo_types'; export { ReturnDocument } from './operations/find_and_modify'; export { ProfilingLevel } from './operations/set_profiling_level'; export { ReadConcernLevel } from './read_concern'; diff --git a/src/mongo_types.ts b/src/mongo_types.ts index 16f5740e95..0676a31c22 100644 --- a/src/mongo_types.ts +++ b/src/mongo_types.ts @@ -1,4 +1,4 @@ -import type { ObjectIdLike } from 'bson'; +import type { 
BSONType, ObjectIdLike } from 'bson'; import { EventEmitter } from 'events'; import type { @@ -158,33 +158,6 @@ export type BitwiseFilter = | Binary /** BinData bit mask */ | ReadonlyArray; /** `[ , , ... ]` */ -/** @public */ -export const BSONType = Object.freeze({ - double: 1, - string: 2, - object: 3, - array: 4, - binData: 5, - undefined: 6, - objectId: 7, - bool: 8, - date: 9, - null: 10, - regex: 11, - dbPointer: 12, - javascript: 13, - symbol: 14, - javascriptWithScope: 15, - int: 16, - timestamp: 17, - long: 18, - decimal: 19, - minKey: -1, - maxKey: 127 -} as const); - -/** @public */ -export type BSONType = typeof BSONType[keyof typeof BSONType]; /** @public */ export type BSONTypeAlias = keyof typeof BSONType; diff --git a/src/sessions.ts b/src/sessions.ts index 1774f7c780..ddc0e53f5a 100644 --- a/src/sessions.ts +++ b/src/sessions.ts @@ -30,6 +30,7 @@ import { ReadPreference } from './read_preference'; import { _advanceClusterTime, ClusterTime, TopologyType } from './sdam/common'; import { isTransactionCommand, Transaction, TransactionOptions, TxnState } from './transactions'; import { + ByteUtils, calculateDurationInMs, Callback, commandSupportsReadConcern, @@ -347,7 +348,7 @@ export class ClientSession extends TypedEventEmitter { return false; } - return this.id.id.buffer.equals(session.id.id.buffer); + return ByteUtils.equals(this.id.id.buffer, session.id.id.buffer); } /** diff --git a/src/utils.ts b/src/utils.ts index 61fc08ef7a..a161593bc2 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -37,10 +37,28 @@ import { W, WriteConcern, WriteConcernOptions } from './write_concern'; */ export type Callback = (error?: AnyError, result?: T) => void; -export const MAX_JS_INT = Number.MAX_SAFE_INTEGER + 1; - export type AnyOptions = Document; +export const ByteUtils = { + toLocalBufferType(this: void, buffer: Buffer | Uint8Array): Buffer { + return Buffer.isBuffer(buffer) + ? buffer + : Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); + }, + + equals(this: void, seqA: Uint8Array, seqB: Uint8Array) { + return ByteUtils.toLocalBufferType(seqA).equals(seqB); + }, + + compare(this: void, seqA: Uint8Array, seqB: Uint8Array) { + return ByteUtils.toLocalBufferType(seqA).compare(seqB); + }, + + toBase64(this: void, uint8array: Uint8Array) { + return ByteUtils.toLocalBufferType(uint8array).toString('base64'); + } +}; + /** * Throws if collectionName is not a valid mongodb collection namespace. 
* @internal @@ -1401,7 +1419,7 @@ export function compareObjectId(oid1?: ObjectId | null, oid2?: ObjectId | null): return 1; } - return oid1.id.compare(oid2.id); + return ByteUtils.compare(oid1.id, oid2.id); } export function parseInteger(value: unknown): number | null { diff --git a/test/integration/change-streams/change_streams.prose.test.ts b/test/integration/change-streams/change_streams.prose.test.ts index 2f8749bccb..531776adeb 100644 --- a/test/integration/change-streams/change_streams.prose.test.ts +++ b/test/integration/change-streams/change_streams.prose.test.ts @@ -429,7 +429,7 @@ describe('Change Stream prose tests', function () { // Helpers timestamp() { - return new Timestamp(this._timestampCounter++, Date.now()); + return new Timestamp({ i: this._timestampCounter++, t: this._timestampCounter }); } applyOpTime(obj) { diff --git a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts index 364dd32030..6431c70052 100644 --- a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts +++ b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts @@ -35,14 +35,6 @@ const skippedAuthTests = [ 'unset works with an encrypted field', 'updateOne with deterministic encryption', 'updateMany with deterministic encryption', - 'type=date', - 'type=regex', - 'type=timestamp', - 'type=javascript', - 'type=binData', - 'type=int', - 'type=objectId', - 'type=symbol', 'replaceOne with encryption', 'Insert with encryption on a missing key', 'A local schema should override', diff --git a/test/integration/crud/insert.test.js b/test/integration/crud/insert.test.js index 268a282eb3..4602a0af4e 100644 --- a/test/integration/crud/insert.test.js +++ b/test/integration/crud/insert.test.js @@ -4,7 +4,6 @@ const { format: f } = require('util'); const { expect } = require('chai'); const Script = require('vm'); -const { normalizedFunctionString } = require('bson/lib/parser/utils'); const { Long, @@ -143,41 +142,20 @@ describe('crud - insert', function () { } }); - it('shouldCorrectlyHandleMultipleDocumentInsert', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, + it('insertMany returns the insertedIds and we can look up the documents', async function () { + const db = client.db(); + const collection = db.collection('test_multiple_insert'); + const docs = [{ a: 1 }, { a: 2 }]; - test: function (done) { - var configuration = this.configuration; - var client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 }); - client.connect(function (err, client) { - var db = client.db(configuration.db); - var collection = db.collection('test_multiple_insert'); - var docs = [{ a: 1 }, { a: 2 }]; - - collection.insert(docs, configuration.writeConcernMax(), function (err, r) { - expect(r).property('insertedCount').to.equal(2); - test.ok(r.insertedIds[0]._bsontype === 'ObjectID'); - test.ok(r.insertedIds[1]._bsontype === 'ObjectID'); - - // Let's ensure we have both documents - collection.find().toArray(function (err, docs) { - test.equal(2, docs.length); - var results = []; - // Check that we have all the results we want - docs.forEach(function (doc) { - if (doc.a === 1 || doc.a === 2) results.push(1); - }); - test.equal(2, results.length); - // Let's close the db - 
client.close(done); - }); - }); - }); - } + const r = await collection.insertMany(docs); + expect(r).property('insertedCount').to.equal(2); + expect(r.insertedIds[0]).to.have.property('_bsontype', 'ObjectId'); + expect(r.insertedIds[1]).to.have.property('_bsontype', 'ObjectId'); + + const foundDocs = await collection.find().toArray(); + expect(foundDocs).to.have.lengthOf(2); + expect(foundDocs).to.have.nested.property('[0].a', 1); + expect(foundDocs).to.have.nested.property('[1].a', 2); }); it('shouldCorrectlyInsertAndRetrieveLargeIntegratedArrayDocument', { @@ -563,70 +541,6 @@ describe('crud - insert', function () { } }); - it('shouldThrowErrorIfSerializingFunctionOrdered', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var configuration = this.configuration; - var db = client.db(configuration.db); - var collection = db.collection('test_should_throw_error_if_serializing_function'); - var func = function () { - return 1; - }; - // Insert the update - collection.insert( - { i: 1, z: func }, - { writeConcern: { w: 1 }, serializeFunctions: true }, - function (err, result) { - expect(err).to.not.exist; - - collection.findOne({ _id: result.insertedIds[0] }, function (err, object) { - expect(err).to.not.exist; - test.equal(normalizedFunctionString(func), object.z.code); - test.equal(1, object.i); - client.close(done); - }); - } - ); - } - }); - - it('shouldThrowErrorIfSerializingFunctionUnOrdered', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var configuration = this.configuration; - var db = client.db(configuration.db); - var collection = db.collection('test_should_throw_error_if_serializing_function_1'); - var func = function () { - return 1; - }; - // Insert the update - collection.insert( - { i: 1, z: func }, - { writeConcern: { w: 1 }, serializeFunctions: true, ordered: false }, - function (err, result) { - expect(err).to.not.exist; - - collection.findOne({ _id: result.insertedIds[0] }, function (err, object) { - expect(err).to.not.exist; - test.equal(normalizedFunctionString(func), object.z.code); - test.equal(1, object.i); - client.close(done); - }); - } - ); - } - }); - it('shouldCorrectlyInsertDocumentWithUUID', { // Add a tag that our runner can trigger on // in this case we are setting that node needs to be higher than 0.10.X to run @@ -927,43 +841,25 @@ describe('crud - insert', function () { } }); - it('Should correctly insert object with timestamps', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, + it('inserts and retrieves objects with timestamps', async function () { + const doc = { + _id: new ObjectId('4e886e687ff7ef5e00000162'), + str: 'foreign', + type: 2, + timestamp: new Timestamp({ i: 10000, t: 0 }), + links: [ + 'http://www.reddit.com/r/worldnews/comments/kybm0/uk_home_secretary_calls_for_the_scrapping_of_the/' + ], + timestamp2: new Timestamp({ i: 33333, t: 0 }) + }; - test: function (done) { - var configuration = this.configuration; - var doc = { - _id: 
new ObjectId('4e886e687ff7ef5e00000162'), - str: 'foreign', - type: 2, - timestamp: new Timestamp(10000), - links: [ - 'http://www.reddit.com/r/worldnews/comments/kybm0/uk_home_secretary_calls_for_the_scrapping_of_the/' - ], - timestamp2: new Timestamp(33333) - }; + const db = client.db(); + const collection = db.collection('Should_correctly_insert_object_with_timestamps'); - var client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 }); - client.connect(function (err, client) { - var db = client.db(configuration.db); - var collection = db.collection('Should_correctly_insert_object_with_timestamps'); - - collection.insert(doc, configuration.writeConcernMax(), function (err, result) { - test.ok(err == null); - test.ok(result); - - collection.findOne(function (err, item) { - test.ok(err == null); - test.deepEqual(doc, item); - client.close(done); - }); - }); - }); - } + const { insertedId } = await collection.insertOne(doc); + expect(insertedId.equals(doc._id)).to.be.true; + const result = await collection.findOne({ timestamp: new Timestamp({ i: 10000, t: 0 }) }); + expect(result).to.deep.equal(doc); }); it('Should Correctly allow for control of serialization of functions on command level', { @@ -1679,40 +1575,21 @@ describe('crud - insert', function () { } }); - it('mixedTimestampAndDateQuery', { - // Add a tag that our runner can trigger on - // in this case we are setting that node needs to be higher than 0.10.X to run - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var configuration = this.configuration; - var client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 }); - client.connect(function (err, client) { - var db = client.db(configuration.db); - var collection = db.collection('timestamp_date'); + it('lookups for timestamp and date work', async function () { + const db = client.db(); + const collection = db.collection('timestamp_date'); - var d = new Date(); - var documents = [{ x: new Timestamp(1, 2) }, { x: d }]; + const d = new Date(); + const documents = [{ x: new Timestamp({ i: 1, t: 2 }) }, { x: d }]; - collection.insert(documents, configuration.writeConcernMax(), function (err, result) { - expect(err).to.not.exist; - test.ok(result); + const result = await collection.insertMany(documents); + test.ok(result); - collection.findOne({ x: new Timestamp(1, 2) }, function (err, doc) { - expect(err).to.not.exist; - test.ok(doc != null); + const doc = await collection.findOne({ x: new Timestamp({ i: 1, t: 2 }) }); + expect(doc).to.not.be.null; - collection.findOne({ x: d }, function (err, doc) { - expect(err).to.not.exist; - test.ok(doc != null); - client.close(done); - }); - }); - }); - }); - } + const docDate = await collection.findOne({ x: d }); + expect(docDate).to.not.be.null; }); it('positiveAndNegativeInfinity', { diff --git a/test/integration/objectid.test.js b/test/integration/objectid.test.js deleted file mode 100644 index 69f5eced09..0000000000 --- a/test/integration/objectid.test.js +++ /dev/null @@ -1,182 +0,0 @@ -'use strict'; -var test = require('./shared').assert; -const { expect } = require('chai'); -const { ObjectId } = require('../mongodb'); -const { sleep } = require('../tools/utils'); - -describe('ObjectId', function () { - let client; - beforeEach(async function () { - client = this.configuration.newClient(); - }); - - afterEach(async function () { - await client.close(); - }); - - it('generates new ObjectId for 
documents without _id property', async function () { - const db = client.db(); - const collection = db.collection('test_object_id_generation'); - await collection.drop().catch(() => null); - - const documents = [{ a: 1 }, { a: 1 }, { a: 1 }]; - - const parallelInserts = await Promise.all([ - collection.insertOne(documents[0]), - collection.insertOne(documents[1]), - collection.insertOne(documents[2]) - ]); - - expect(parallelInserts).to.have.lengthOf(3); - - // Input documents are modified - expect(documents[0]).to.have.deep.property('_id', parallelInserts[0].insertedId); - expect(documents[1]).to.have.deep.property('_id', parallelInserts[1].insertedId); - expect(documents[2]).to.have.deep.property('_id', parallelInserts[2].insertedId); - - // ObjectIds are generated in a predictable order - expect(documents[0]._id.id.compare(documents[1]._id.id)).to.equal(-1); - expect(documents[1]._id.id.compare(documents[2]._id.id)).to.equal(-1); - expect(documents[2]._id.id.compare(documents[0]._id.id)).to.equal(1); - }); - - it('shouldCorrectlyRetrieve24CharacterHexStringFromToString', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - // Create a new ObjectId - var objectId = new ObjectId(); - // Verify that the hex string is 24 characters long - test.equal(24, objectId.toString().length); - done(); - } - }); - - it('shouldCorrectlyRetrieve24CharacterHexStringFromToJSON', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - // Create a new ObjectId - var objectId = new ObjectId(); - // Verify that the hex string is 24 characters long - test.equal(24, objectId.toJSON().length); - done(); - } - }); - - it('shouldCorrectlyCreateOIDNotUsingObjectId', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var configuration = this.configuration; - var client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 }); - var db = client.db(configuration.db); - var collection = db.collection('test_non_oid_id'); - var date = new Date(); - date.setUTCDate(12); - date.setUTCFullYear(2009); - date.setUTCMonth(11 - 1); - date.setUTCHours(12); - date.setUTCMinutes(0); - date.setUTCSeconds(30); - - collection.insert({ _id: date }, { writeConcern: { w: 1 } }, function (err) { - expect(err).to.not.exist; - collection.find({ _id: date }).toArray(function (err, items) { - test.equal('' + date, '' + items[0]._id); - - // Let's close the db - client.close(done); - }); - }); - } - }); - - it('shouldCorrectlyGenerateObjectIdFromTimestamp', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var timestamp = Math.floor(new Date().getTime() / 1000); - var objectID = new ObjectId(timestamp); - var time2 = objectID.generationTime; - test.equal(timestamp, time2); - done(); - } - }); - - it('shouldCorrectlyCreateAnObjectIdAndOverrideTheTimestamp', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: function (done) { - var timestamp = 1000; - var objectID = new ObjectId(); - var id1 = objectID.id; - // Override the timestamp - objectID.generationTime = timestamp; - var id2 = objectID.id; - - // Check the timestamp - if (id1 instanceof Buffer && id2 instanceof Buffer) { - 
test.deepEqual(id1.slice(0, 4), id2.slice(0, 4)); - } else { - test.equal(id1.substr(4), id2.substr(4)); - } - - done(); - } - }); - - it('shouldCorrectlyInsertWithObjectId', { - metadata: { - requires: { topology: ['single', 'replicaset', 'sharded', 'ssl', 'heap', 'wiredtiger'] } - }, - - test: async function () { - const client = this.configuration.newClient(this.configuration.writeConcernMax(), { - maxPoolSize: 1 - }); - - const db = client.db(this.configuration.db); - const collection = db.collection('shouldCorrectlyInsertWithObjectId'); - await collection.insertMany([{}], { writeConcern: { w: 1 } }); - const firstCompareDate = new Date(); - - await sleep(200); - - await collection.insertMany([{}], { writeConcern: { w: 1 } }); - const secondCompareDate = new Date(); - - const items = await collection.find().toArray(); - // Date 1 - const date1 = new Date(); - date1.setTime(items[0]._id.generationTime * 1000); - // Date 2 - const date2 = new Date(); - date2.setTime(items[1]._id.generationTime * 1000); - - // Compare - test.equal(firstCompareDate.getFullYear(), date1.getFullYear()); - test.equal(firstCompareDate.getDate(), date1.getDate()); - test.equal(firstCompareDate.getMonth(), date1.getMonth()); - test.equal(firstCompareDate.getHours(), date1.getHours()); - - test.equal(secondCompareDate.getFullYear(), date2.getFullYear()); - test.equal(secondCompareDate.getDate(), date2.getDate()); - test.equal(secondCompareDate.getMonth(), date2.getMonth()); - test.equal(secondCompareDate.getHours(), date2.getHours()); - - // Let's close the db - await client.close(); - } - }); -}); diff --git a/test/integration/objectid.test.ts b/test/integration/objectid.test.ts new file mode 100644 index 0000000000..1a6b3c4af4 --- /dev/null +++ b/test/integration/objectid.test.ts @@ -0,0 +1,144 @@ +import { expect } from 'chai'; + +import { Collection, Db, MongoClient, ObjectId } from '../../src'; +import { sleep } from '../tools/utils'; + +// TODO(NODE-4989): Improve these tests, likely can be made unit tests, or migrated to CRUD coverage (find oid range) +describe('ObjectId', function () { + let client: MongoClient; + let collection: Collection<{ name: string }>; + let commandStartedEvents; + let commandSucceededEvents; + beforeEach(async function () { + client = this.configuration.newClient({ monitorCommands: true }); + await client + .db() + .collection('oid_test') + .drop() + .catch(() => null); + collection = client.db().collection('oid_test'); + commandStartedEvents = []; + commandSucceededEvents = []; + client.on('commandStarted', e => commandStartedEvents.push(e)); + client.on('commandSucceeded', e => commandSucceededEvents.push(e)); + }); + + afterEach(async function () { + await client.close(); + }); + + it('generated objectId returns inserted document when cloned via hex string', async function () { + const { insertedId } = await collection.insertOne({ name: 'toph' }); + expect(insertedId).to.have.property('_bsontype', 'ObjectId'); + + const found = await collection.findOne({ _id: new ObjectId(insertedId.toHexString()) }); + expect(found).to.have.property('name', 'toph'); + expect(found).to.have.property('_id'); + expect(found?._id.toHexString()).to.equal(insertedId.toHexString()); + }); + + it('ObjectId toString returns 24 character string', () => { + const objectId = new ObjectId(); + expect(objectId.toString()).to.have.lengthOf(24); + }); + + it('ObjectId toJSON returns 24 character string', function () { + const objectId = new ObjectId(); + expect(objectId.toJSON()).to.have.lengthOf(24); + }); + 
+ it('Date can be used as a primary key _id', async function () { + // This has nothing to do with ObjectId + const configuration = this.configuration; + const client = configuration.newClient(configuration.writeConcernMax(), { maxPoolSize: 1 }); + const db: Db = client.db(configuration.db); + const collection = db.collection<{ _id: Date }>('test_non_oid_id'); + const date = new Date(); + date.setUTCDate(12); + date.setUTCFullYear(2009); + date.setUTCMonth(11 - 1); + date.setUTCHours(12); + date.setUTCMinutes(0); + date.setUTCSeconds(30); + + await collection.insertOne({ _id: date }, { writeConcern: { w: 1 } }); + const items = await collection.find({ _id: date }).toArray(); + expect('' + date).to.equal('' + items[0]._id); + + // Let's close the db + await client.close(); + }); + + it('getTimestamp should return date equal to input Date', function () { + const date = new Date(); + // ObjectId timestamp is only in seconds + date.setMilliseconds(0); + + const epochSeconds = date.getTime() / 1000; + const oid = new ObjectId(epochSeconds); + const time = oid.getTimestamp(); + + expect(time).to.deep.equal(date); + expect(time.getTime() / 1000).to.deep.equal(epochSeconds); + }); + + it('range query based on objectId timestamp', async () => { + const oid1 = new ObjectId(); + await sleep(1000); + const oid2 = new ObjectId(); + await sleep(1000); + const oid3 = new ObjectId(); + + const collection = client.db().collection<{ _id: ObjectId }>('oid_range'); + await collection.drop().catch(() => null); + + // Insertion intentionally out of order, we want to filter out 3 with a range query + await collection.insertMany([{ _id: oid1 }, { _id: oid3 }, { _id: oid2 }]); + + // Greater than or equal to the time in oid1 + const $gte = ObjectId.createFromTime(oid1.getTimestamp().getTime() / 1000); + // Strictly less than the time in oid3 + const $lt = ObjectId.createFromTime(oid3.getTimestamp().getTime() / 1000); + + const found = await collection.find({ _id: { $gte, $lt } }).toArray(); + expect(found).to.have.lengthOf(2); + expect(found).to.have.deep.nested.property('[0]._id', oid1); + expect(found).to.have.deep.nested.property('[1]._id', oid2); + }); + + it('timestamp section of ObjectId should translate to Date', async function () { + const client = this.configuration.newClient(this.configuration.writeConcernMax(), { + maxPoolSize: 1 + }); + + const db = client.db(this.configuration.db); + const collection = db.collection('shouldCorrectlyInsertWithObjectId'); + await collection.insertMany([{}], { writeConcern: { w: 1 } }); + const firstCompareDate = new Date(); + + await sleep(200); + + await collection.insertMany([{}], { writeConcern: { w: 1 } }); + const secondCompareDate = new Date(); + + const items = await collection.find().toArray(); + // Date 1 + const date1 = items[0]._id.getTimestamp(); + // Date 2 + const date2 = items[1]._id.getTimestamp(); + + // Compare + expect(firstCompareDate.getFullYear()).to.equal(date1.getFullYear()); + expect(firstCompareDate.getDate()).to.equal(date1.getDate()); + expect(firstCompareDate.getMonth()).to.equal(date1.getMonth()); + expect(firstCompareDate.getHours()).to.equal(date1.getHours()); + + expect(secondCompareDate.getFullYear()).to.equal(date2.getFullYear()); + expect(secondCompareDate.getDate()).to.equal(date2.getDate()); + expect(secondCompareDate.getMonth()).to.equal(date2.getMonth()); + expect(secondCompareDate.getHours()).to.equal(date2.getHours()); + + // Let's close the db + await client.close(); + }); +}); diff --git a/test/tools/common.js 
b/test/tools/common.js index 6d16694f3e..ed1ad378eb 100644 --- a/test/tools/common.js +++ b/test/tools/common.js @@ -121,7 +121,7 @@ class ReplSetFixture { */ function genClusterTime(time) { return { - clusterTime: new BSON.Timestamp(time), + clusterTime: new BSON.Timestamp(BSON.Long.fromNumber(time, true)), signature: { hash: new BSON.Binary('test'), keyId: new BSON.Long(1) } }; } diff --git a/test/tools/spec-runner/matcher.js b/test/tools/spec-runner/matcher.js index 6ea9b5a0cd..8a842644a1 100644 --- a/test/tools/spec-runner/matcher.js +++ b/test/tools/spec-runner/matcher.js @@ -34,6 +34,16 @@ function generateMatchAndDiffSpecialCase(key, expectedObj, actualObj, metadata) }; } + if (actualObj._bsontype === 'Code' && key === 'scope') { + // HACK: Code always has a scope property, it can sometimes be null, + // the "does not exist" logic does not handle that + return { + match: actualObj.scope == null, + expected: expectedObj, + actual: actualObj + }; + } + const match = !Object.prototype.hasOwnProperty.call(actualObj, key); return { match, diff --git a/test/unit/bson.test.js b/test/unit/bson.test.ts similarity index 85% rename from test/unit/bson.test.js rename to test/unit/bson.test.ts index 19e1407a5b..6218933c3b 100644 --- a/test/unit/bson.test.js +++ b/test/unit/bson.test.ts @@ -1,14 +1,13 @@ -'use strict'; +import { expect } from 'chai'; -const { expect } = require('chai'); -const BSON = require('../mongodb'); +import { BSON } from '../mongodb'; describe('When importing BSON', function () { const types = [ ['Long', 23], ['ObjectId', '123456789123456789123456'], ['Binary', Buffer.from('abc', 'ascii')], - ['Timestamp', 23], + ['Timestamp', 23n], ['Code', 'function(){}'], ['MinKey', undefined], ['MaxKey', undefined], @@ -16,7 +15,7 @@ describe('When importing BSON', function () { ['Int32', 23], ['Double', 2.3], ['BSONRegExp', 'abc'] - ]; + ] as const; // Omitted types since they're deprecated: // BSONSymbol // DBRef @@ -38,8 +37,8 @@ describe('When importing BSON', function () { } it('should correctly round trip Map', function () { - expect(BSON.Map).to.be.a('function'); - const doc = { key: new BSON.Map([['2', 2]]) }; + expect(Map).to.be.a('function'); + const doc = { key: new Map([['2', 2]]) }; const outputDoc = BSON.deserialize(BSON.serialize(doc)); expect(outputDoc).to.have.nested.property('key.2', 2); }); diff --git a/test/unit/index.test.ts b/test/unit/index.test.ts index 0e0f1f6819..1db9ba4665 100644 --- a/test/unit/index.test.ts +++ b/test/unit/index.test.ts @@ -63,7 +63,6 @@ const EXPECTED_EXPORTS = [ 'Logger', 'LoggerLevel', 'Long', - 'Map', 'MaxKey', 'MinKey', 'MongoAPIError', diff --git a/test/unit/utils.test.ts b/test/unit/utils.test.ts index 0a2eabfba9..ad587853ba 100644 --- a/test/unit/utils.test.ts +++ b/test/unit/utils.test.ts @@ -2,6 +2,7 @@ import { expect } from 'chai'; import { BufferPool, + ByteUtils, compareObjectId, eachAsync, HostAddress, @@ -819,4 +820,83 @@ describe('driver utils', function () { it(title, () => expect(compareObjectId(oid1, oid2)).to.equal(result)); } }); + + context('const ByteUtils', () => { + context('toLocalBufferType()', () => { + it('returns identical Node.js buffer instance when input is Buffer', () => { + const buffer = Buffer.from([1, 2, 3]); + // Note: **Not** a deep.equal check + expect(ByteUtils.toLocalBufferType(buffer)).to.equal(buffer); + }); + + it('returns new Node.js buffer instance when input is Uint8Array', () => { + const uint8array = new Uint8Array([1, 2, 3]); + 
expect(Buffer.isBuffer(ByteUtils.toLocalBufferType(uint8array))).to.be.true; + }); + + it('does not clone ArrayBuffer when creating a new Node.js Buffer', () => { + const uint8array = new Uint8Array([1, 2, 3]); + // Note: **Not** a deep.equal check + expect(ByteUtils.toLocalBufferType(uint8array).buffer).to.equal(uint8array.buffer); + }); + }); + + context('equals()', () => { + it('is a function', () => expect(ByteUtils).property('equals').is.a('function')); + + it('returns true for equal Buffer or Uint8Array', () => { + const buffer = Buffer.from([1, 2, 3]); + const uint8array = new Uint8Array([1, 2, 3]); + + expect(ByteUtils.equals(buffer, uint8array)).to.be.true; + expect(ByteUtils.equals(uint8array, buffer)).to.be.true; + expect(ByteUtils.equals(uint8array, uint8array)).to.be.true; + expect(ByteUtils.equals(buffer, buffer)).to.be.true; + }); + + it('returns false for nonequal Buffer or Uint8Array', () => { + const buffer = Buffer.from([1, 2, 3]); + const uint8array = new Uint8Array([1, 2, 4]); + + expect(ByteUtils.equals(buffer, uint8array)).to.be.false; + expect(ByteUtils.equals(uint8array, buffer)).to.be.false; + }); + }); + + context('compare()', () => { + it('is a function', () => expect(ByteUtils).property('compare').is.a('function')); + + it('returns 0 for equal Buffer or Uint8Array', () => { + const buffer = Buffer.from([1, 2, 3]); + const uint8array = new Uint8Array([1, 2, 3]); + + expect(ByteUtils.compare(buffer, uint8array)).to.equal(0); + expect(ByteUtils.compare(uint8array, buffer)).to.equal(0); + expect(ByteUtils.compare(uint8array, uint8array)).to.equal(0); + expect(ByteUtils.compare(buffer, buffer)).to.equal(0); + }); + + it('returns +/- 1 for Buffer or UInt8Array if one is greater or less than', () => { + const buffer = Buffer.from([1, 2, 3]); + const uint8array = new Uint8Array([1, 2, 4]); + + expect(ByteUtils.compare(buffer, uint8array)).to.equal(-1); + expect(ByteUtils.compare(uint8array, buffer)).to.equal(1); + }); + }); + + context('toBase64()', () => { + it('is a function', () => expect(ByteUtils).property('toBase64').is.a('function')); + + const oneTwoThreeBase64 = 'AQID'; + + it('converts a Buffer to a base64 string', () => { + expect(ByteUtils.toBase64(Buffer.from([1, 2, 3]))).to.equal(oneTwoThreeBase64); + }); + + it('converts a Uint8Array to a base64 string', () => { + expect(ByteUtils.toBase64(new Uint8Array([1, 2, 3]))).to.equal(oneTwoThreeBase64); + }); + }); + }); });
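
For readers updating their own call sites, the Timestamp changes that recur in the tests above come down to the two constructor forms bson v5 accepts: a `{ t, i }` pair or a single unsigned 64-bit `Long`. A minimal sketch, assuming `bson@5.x` is installed (variable names here are illustrative, not part of the patch):

```ts
import { Long, Timestamp } from 'bson';

// Object form: t is the seconds component, i the increment.
const fromParts = new Timestamp({ t: 0, i: 10000 });

// Single-argument form: the raw 64-bit value as an unsigned Long,
// mirroring genClusterTime() in test/tools/common.js above.
const fromLong = new Timestamp(Long.fromNumber(10000, true));

console.log(fromParts.toString() === fromLong.toString()); // true
```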