diff --git a/test/functional/connection.test.js b/test/functional/connection.test.js index 5e9c62a45ed..19d3544e577 100644 --- a/test/functional/connection.test.js +++ b/test/functional/connection.test.js @@ -144,9 +144,9 @@ describe('Connection - functional', function () { metadata: { requires: { topology: 'single' } }, test: function (done) { - var configuration = this.configuration; - var user = 'testConnectGoodAuth', - password = 'password'; + const configuration = this.configuration; + const username = 'testConnectGoodAuth'; + const password = 'password'; const setupClient = configuration.newClient(); @@ -155,14 +155,14 @@ describe('Connection - functional', function () { expect(err).to.not.exist; var db = client.db(configuration.db); - db.addUser(user, password, function (err) { + db.addUser(username, password, function (err) { expect(err).to.not.exist; client.close(restOfTest); }); }); function restOfTest() { - const testClient = configuration.newClient(configuration.url(user, password)); + const testClient = configuration.newClient(configuration.url({ username, password })); testClient.connect( connectionTester(configuration, 'testConnectGoodAuth', function (client) { client.close(done); @@ -176,7 +176,7 @@ describe('Connection - functional', function () { metadata: { requires: { topology: 'single' } }, test: function (done) { - var configuration = this.configuration; + const configuration = this.configuration; const username = 'testConnectGoodAuthAsOption'; const password = 'password'; @@ -211,7 +211,9 @@ describe('Connection - functional', function () { test: function (done) { var configuration = this.configuration; - const client = configuration.newClient(configuration.url('slithy', 'toves')); + const client = configuration.newClient( + configuration.url({ username: 'slithy', password: 'toves' }) + ); client.connect(function (err, client) { expect(err).to.exist; expect(client).to.not.exist; diff --git a/test/functional/spec-runner/utils.js b/test/functional/spec-runner/utils.js index dfc0d30ed37..a4112f2cc08 100644 --- a/test/functional/spec-runner/utils.js +++ b/test/functional/spec-runner/utils.js @@ -3,7 +3,7 @@ function resolveConnectionString(configuration, spec, context) { const isShardedEnvironment = configuration.topologyType === 'Sharded'; const useMultipleMongoses = spec && !!spec.useMultipleMongoses; - const user = context && context.user; + const username = context && context.user; const password = context && context.password; const authSource = context && context.authSource; const connectionString = @@ -11,7 +11,7 @@ function resolveConnectionString(configuration, spec, context) { ? `mongodb://${configuration.host}:${configuration.port}/${ configuration.db }?directConnection=false${authSource ? 
`&authSource=${authSource}` : ''}`
-    : configuration.url(user, password, { authSource });
+    : configuration.url({ username, password, authSource });

   return connectionString;
 }
diff --git a/test/functional/unified-spec-runner/entities.ts b/test/functional/unified-spec-runner/entities.ts
index 32ee805b44a..11027541bf0 100644
--- a/test/functional/unified-spec-runner/entities.ts
+++ b/test/functional/unified-spec-runner/entities.ts
@@ -1,4 +1,7 @@
 import { MongoClient, Db, Collection, GridFSBucket, Document } from '../../../src/index';
+import { ReadConcern } from '../../../src/read_concern';
+import { WriteConcern } from '../../../src/write_concern';
+import { ReadPreference } from '../../../src/read_preference';
 import { ClientSession } from '../../../src/sessions';
 import { ChangeStream } from '../../../src/change_stream';
 import type { ClientEntity, EntityDescription } from './schema';
@@ -8,13 +11,17 @@ import type {
   CommandSucceededEvent
 } from '../../../src/cmap/events';
 import { patchCollectionOptions, patchDbOptions } from './unified-utils';
-import { TestConfiguration } from './unified.test';
 import { expect } from 'chai';
+import { TestConfiguration } from './runner';

 interface UnifiedChangeStream extends ChangeStream {
   eventCollector: InstanceType<typeof import('../../tools/utils')['EventCollector']>;
 }

+interface UnifiedClientSession extends ClientSession {
+  client: UnifiedMongoClient;
+}
+
 export type CommandEvent = CommandStartedEvent | CommandSucceededEvent | CommandFailedEvent;

 export class UnifiedMongoClient extends MongoClient {
@@ -85,7 +92,7 @@ export type Entity =
   | UnifiedMongoClient
   | Db
   | Collection
-  | ClientSession
+  | UnifiedClientSession
   | UnifiedChangeStream
   | GridFSBucket
   | Document; // Results from operations
@@ -112,7 +119,7 @@ export class EntitiesMap<E = Entity> extends Map<string, E> {
   mapOf(type: 'client'): EntitiesMap<UnifiedMongoClient>;
   mapOf(type: 'db'): EntitiesMap<Db>;
   mapOf(type: 'collection'): EntitiesMap<Collection>;
-  mapOf(type: 'session'): EntitiesMap<ClientSession>;
+  mapOf(type: 'session'): EntitiesMap<UnifiedClientSession>;
   mapOf(type: 'bucket'): EntitiesMap<GridFSBucket>;
   mapOf(type: 'stream'): EntitiesMap<UnifiedChangeStream>;
   mapOf(type: EntityTypeId): EntitiesMap<Entity> {
@@ -126,13 +133,13 @@ export class EntitiesMap<E = Entity> extends Map<string, E> {
   getEntity(type: 'client', key: string, assertExists?: boolean): UnifiedMongoClient;
   getEntity(type: 'db', key: string, assertExists?: boolean): Db;
   getEntity(type: 'collection', key: string, assertExists?: boolean): Collection;
-  getEntity(type: 'session', key: string, assertExists?: boolean): ClientSession;
+  getEntity(type: 'session', key: string, assertExists?: boolean): UnifiedClientSession;
   getEntity(type: 'bucket', key: string, assertExists?: boolean): GridFSBucket;
   getEntity(type: 'stream', key: string, assertExists?: boolean): UnifiedChangeStream;
   getEntity(type: EntityTypeId, key: string, assertExists = true): Entity {
     const entity = this.get(key);
     if (!entity) {
-      if (assertExists) throw new Error(`Entity ${key} does not exist`);
+      if (assertExists) throw new Error(`Entity '${key}' does not exist`);
       return;
     }
     const ctor = ENTITY_CTORS.get(type);
@@ -163,7 +170,8 @@ export class EntitiesMap<E = Entity> extends Map<string, E> {
     const map = new EntitiesMap();
     for (const entity of entities ??
[]) { if ('client' in entity) { - const client = new UnifiedMongoClient(config.url(), entity.client); + const uri = config.url({ useMultipleMongoses: entity.client.useMultipleMongoses }); + const client = new UnifiedMongoClient(uri, entity.client); await client.connect(); map.set(entity.client.id, client); } else if ('database' in entity) { @@ -181,11 +189,60 @@ export class EntitiesMap extends Map { ); map.set(entity.collection.id, collection); } else if ('session' in entity) { - map.set(entity.session.id, null); + const client = map.getEntity('client', entity.session.client); + + const options = Object.create(null); + + if (entity.session.sessionOptions?.causalConsistency) { + options.causalConsistency = entity.session.sessionOptions?.causalConsistency; + } + + if (entity.session.sessionOptions?.defaultTransactionOptions) { + options.defaultTransactionOptions = Object.create(null); + const defaultOptions = entity.session.sessionOptions.defaultTransactionOptions; + if (defaultOptions.readConcern) { + options.defaultTransactionOptions.readConcern = ReadConcern.fromOptions( + defaultOptions.readConcern + ); + } + if (defaultOptions.writeConcern) { + options.defaultTransactionOptions.writeConcern = WriteConcern.fromOptions( + defaultOptions + ); + } + if (defaultOptions.readPreference) { + options.defaultTransactionOptions.readPreference = ReadPreference.fromOptions( + defaultOptions.readPreference + ); + } + if (typeof defaultOptions.maxCommitTimeMS === 'number') { + options.defaultTransactionOptions.maxCommitTimeMS = defaultOptions.maxCommitTimeMS; + } + } + + const session = client.startSession(options) as UnifiedClientSession; + // targetedFailPoint operations need to access the client the session came from + session.client = client; + + map.set(entity.session.id, session); } else if ('bucket' in entity) { - map.set(entity.bucket.id, null); + const db = map.getEntity('db', entity.bucket.database); + + const options = Object.create(null); + + if (entity.bucket.bucketOptions?.bucketName) { + options.bucketName = entity.bucket.bucketOptions?.bucketName; + } + if (entity.bucket.bucketOptions?.chunkSizeBytes) { + options.chunkSizeBytes = entity.bucket.bucketOptions?.chunkSizeBytes; + } + if (entity.bucket.bucketOptions?.readPreference) { + options.readPreference = entity.bucket.bucketOptions?.readPreference; + } + + map.set(entity.bucket.id, new GridFSBucket(db, options)); } else if ('stream' in entity) { - map.set(entity.stream.id, null); + throw new Error(`Unsupported Entity ${JSON.stringify(entity)}`); } else { throw new Error(`Unsupported Entity ${JSON.stringify(entity)}`); } diff --git a/test/functional/unified-spec-runner/match.ts b/test/functional/unified-spec-runner/match.ts index 9735063ac88..671344ed7b7 100644 --- a/test/functional/unified-spec-runner/match.ts +++ b/test/functional/unified-spec-runner/match.ts @@ -1,5 +1,4 @@ import { expect } from 'chai'; -import { isDeepStrictEqual } from 'util'; import { Binary, Document, Long, ObjectId, MongoError } from '../../../src'; import { CommandFailedEvent, @@ -107,61 +106,55 @@ TYPE_MAP.set( actual => (typeof actual === 'number' && Number.isInteger(actual)) || Long.isLong(actual) ); -export function expectResultCheck( - actual: Document, - expected: Document | number | string | boolean, - entities: EntitiesMap, - path: string[] = [], - depth = 0 -): boolean { - const ok = resultCheck(actual, expected, entities, path, depth); - if (ok === false) { - const pathString = path.join(''); - const expectedJSON = JSON.stringify(expected, 
undefined, 2);
-    const actualJSON = JSON.stringify(actual, undefined, 2);
-    expect.fail(`Unable to match ${expectedJSON} to ${actualJSON} at ${pathString}`);
-  }
-  return ok;
-}
-
 export function resultCheck(
   actual: Document,
   expected: Document | number | string | boolean,
   entities: EntitiesMap,
-  path: string[],
+  path: string[] = [],
   depth = 0
-): boolean {
-  if (typeof expected === 'object' && expected !== null) {
+): void {
+  if (typeof expected === 'object' && expected) {
     // Expected is an object
     // either its a special operator or just an object to check equality against
     if (isSpecialOperator(expected)) {
       // Special operation check is a base condition
       // specialCheck may recurse depending upon the check ($$unsetOrMatches)
-      return specialCheck(actual, expected, entities, path, depth);
+      specialCheck(actual, expected, entities, path, depth);
+      return;
     } else {
       // Just a plain object, however this object can contain special operations
       // So we need to recurse over each key,value
-      let ok = true;
       const expectedEntries = Object.entries(expected);
-      if (depth > 1 && Object.keys(actual).length !== Object.keys(expected).length) {
-        throw new Error(`[${Object.keys(actual)}] length !== [${Object.keys(expected)}]`);
+      if (depth > 1) {
+        expect(actual, `Expected actual to exist at ${path.join('')}`).to.exist;
+        expect(
+          Object.keys(actual),
+          `[${Object.keys(actual)}] length !== [${Object.keys(expected)}]`
+        ).to.have.lengthOf(Object.keys(expected).length);
       }
       for (const [key, value] of expectedEntries) {
         path.push(Array.isArray(expected) ? `[${key}]` : `.${key}`); // record what key we're at
         depth += 1;
-        ok &&= expectResultCheck(actual[key], value, entities, path, depth);
+        resultCheck(actual[key], value, entities, path, depth);
         depth -= 1;
         path.pop(); // if the recursion was successful we can drop the tested key
       }
-      return ok;
     }
   } else {
     // Here's our recursion base case
-    // expected is: number | string | boolean | null
-    return isDeepStrictEqual(actual, expected);
+    // expected is: number | Long | string | boolean | null
+    if (Long.isLong(actual) && typeof expected === 'number') {
+      // Long requires special equality check
+      expect(actual.equals(expected)).to.be.true;
+    } else if (Long.isLong(expected) && typeof actual === 'number') {
+      // Long requires special equality check
+      expect(expected.equals(actual)).to.be.true;
+    } else {
+      expect(actual).to.equal(expected);
+    }
   }
 }

@@ -172,45 +165,56 @@ export function specialCheck(
   actual: Document,
   expected: SpecialOperator,
   entities: EntitiesMap,
   path: string[] = [],
   depth = 0
-): boolean {
-  let ok = false;
+): void {
   if (isUnsetOrMatchesOperator(expected)) {
     // $$unsetOrMatches
-    ok = true; // start with true assumption
-    if (actual === null || actual === undefined) ok = true;
+    if (actual === null || actual === undefined) return;
     else {
       depth += 1;
-      ok &&= expectResultCheck(actual, expected.$$unsetOrMatches, entities, path, depth);
+      resultCheck(actual, expected.$$unsetOrMatches, entities, path, depth);
       depth -= 1;
     }
   } else if (isMatchesEntityOperator(expected)) {
     // $$matchesEntity
     const entity = entities.get(expected.$$matchesEntity);
-    if (!entity) ok = false;
-    else ok = isDeepStrictEqual(actual, entity);
+    if (
+      typeof actual === 'object' && // an object
+      actual && // that isn't null
+      'equals' in actual && // with an equals
+      typeof actual.equals === 'function' // method
+    ) {
+      expect(actual.equals(entity)).to.be.true;
+    } else {
+      expect(actual).to.equal(entity);
+    }
   } else if (isMatchesHexBytesOperator(expected)) {
     // $$matchesHexBytes
     const expectedBuffer = Buffer.from(expected.$$matchesHexBytes, 'hex');
-    ok = expectedBuffer.every((byte, index) => byte === actual[index]);
+    expect(expectedBuffer.every((byte, index) => byte === actual[index])).to.be.true;
   } else if (isSessionLsidOperator(expected)) {
     // $$sessionLsid
     const session = entities.getEntity('session', expected.$$sessionLsid, false);
-    if (!session) ok = false;
-    else ok = session.id.id.buffer.equals(actual.lsid.id.buffer);
+    expect(session, `Session ${expected.$$sessionLsid} does not exist in entities`).to.exist;
+    const entitySessionHex = session.id.id.buffer.toString('hex').toUpperCase();
+    const actualSessionHex = actual.id.buffer.toString('hex').toUpperCase();
+    expect(
+      entitySessionHex,
+      `Session entity ${expected.$$sessionLsid} does not match lsid`
+    ).to.equal(actualSessionHex);
   } else if (isTypeOperator(expected)) {
     // $$type
+    let ok = false;
     const types = Array.isArray(expected.$$type) ? expected.$$type : [expected.$$type];
     for (const type of types) {
       ok ||= TYPE_MAP.get(type)(actual);
     }
+    expect(ok, `Expected [${actual}] to be one of [${types}]`).to.be.true;
   } else if (isExistsOperator(expected)) {
-    // $$exists - unique, this op uses the path to check if the key is (not) in actual
+    // $$exists
     const actualExists = actual !== undefined && actual !== null;
-    ok = (expected.$$exists && actualExists) || (!expected.$$exists && !actualExists);
+    expect((expected.$$exists && actualExists) || (!expected.$$exists && !actualExists)).to.be.true;
   } else {
-    throw new Error(`Unknown special operator: ${JSON.stringify(expected)}`);
+    expect.fail(`Unknown special operator: ${JSON.stringify(expected)}`);
   }
-
-  return ok;
 }

 export function matchesEvents(
@@ -225,14 +229,14 @@
     const expectedEvent = expected[index];

     if (expectedEvent.commandStartedEvent && actualEvent instanceof CommandStartedEvent) {
-      expectResultCheck(actualEvent, expectedEvent.commandStartedEvent, entities, [
+      resultCheck(actualEvent, expectedEvent.commandStartedEvent, entities, [
         `events[${index}].commandStartedEvent`
       ]);
     } else if (
       expectedEvent.commandSucceededEvent &&
       actualEvent instanceof CommandSucceededEvent
     ) {
-      expectResultCheck(actualEvent, expectedEvent.commandSucceededEvent, entities, [
+      resultCheck(actualEvent, expectedEvent.commandSucceededEvent, entities, [
         `events[${index}].commandSucceededEvent`
       ]);
     } else if (expectedEvent.commandFailedEvent && actualEvent instanceof CommandFailedEvent) {
@@ -253,49 +257,43 @@ export function expectErrorCheck(
     return;
   }

-  if (expected.errorContains) {
-    if (error.message.includes(expected.errorContains)) {
-      throw new Error(
-        `Error message was supposed to contain '${expected.errorContains}' but had '${error.message}'`
-      );
-    }
+  if (expected.errorContains != null) {
+    expect(error.message).to.include(expected.errorContains);
   }

   if (!(error instanceof MongoError)) {
-    throw new Error(`Assertions need ${error} to be a MongoError`);
+    // if statement asserts type for TS, expect will always fail
+    expect(error).to.be.instanceOf(MongoError);
+    return;
   }

-  if (expected.errorCode) {
-    if (error.code !== expected.errorCode) {
-      throw new Error(`${error} was supposed to have code '${expected.errorCode}'`);
-    }
+  if (expected.errorCode != null) {
+    expect(error).to.have.property('code', expected.errorCode);
   }

-  if (expected.errorCodeName) {
-    if (error.codeName !== expected.errorCodeName) {
-      throw new Error(`${error} was supposed to have '${expected.errorCodeName}' codeName`);
-    }
+  if (expected.errorCodeName != null) {
+    expect(error).to.have.property('codeName', expected.errorCodeName);
+  }
   }

-  if (expected.errorLabelsContain) {
+  if (expected.errorLabelsContain != null) {
     for (const errorLabel of expected.errorLabelsContain) {
-      if (!error.hasErrorLabel(errorLabel)) {
-        throw new Error(`${error} was supposed to have '${errorLabel}'`);
-      }
+      expect(
+        error.hasErrorLabel(errorLabel),
+        `Error was supposed to have label ${errorLabel}, has [${error.errorLabels}]`
+      ).to.be.true;
     }
   }

-  if (expected.errorLabelsOmit) {
+  if (expected.errorLabelsOmit != null) {
     for (const errorLabel of expected.errorLabelsOmit) {
-      if (error.hasErrorLabel(errorLabel)) {
-        throw new Error(`${error} was not supposed to have '${errorLabel}'`);
-      }
+      expect(
+        error.hasErrorLabel(errorLabel),
+        `Error was not supposed to have label ${errorLabel}, has [${error.errorLabels}]`
+      ).to.be.false;
     }
   }

-  if (expected.expectResult) {
-    if (!expectResultCheck(error, expected.expectResult, entities)) {
-      throw new Error(`${error} supposed to match result ${JSON.stringify(expected.expectResult)}`);
-    }
+  if (expected.expectResult != null) {
+    resultCheck(error, expected.expectResult, entities);
   }
 }
diff --git a/test/functional/unified-spec-runner/operations.ts b/test/functional/unified-spec-runner/operations.ts
index 43e7851114c..c0608eebf52 100644
--- a/test/functional/unified-spec-runner/operations.ts
+++ b/test/functional/unified-spec-runner/operations.ts
@@ -1,135 +1,186 @@
 /* eslint-disable @typescript-eslint/no-unused-vars */
 import { expect } from 'chai';

-import { Collection, Db } from '../../../src';
-import { ChangeStream, Document, InsertOneOptions } from '../../../src';
-import { BulkWriteResult } from '../../../src/bulk/common';
+import { Collection, Db, GridFSFile, MongoClient, ObjectId } from '../../../src';
+import { ReadConcern } from '../../../src/read_concern';
+import { ReadPreference } from '../../../src/read_preference';
+import { WriteConcern } from '../../../src/write_concern';
+import { Document, InsertOneOptions } from '../../../src';
 import { EventCollector } from '../../tools/utils';
-import { EntitiesMap } from './entities';
-import { expectErrorCheck, expectResultCheck } from './match';
-import type * as uni from './schema';
+import { EntitiesMap, UnifiedMongoClient } from './entities';
+import { expectErrorCheck, resultCheck } from './match';
+import type { OperationDescription } from './schema';
+import { CommandStartedEvent } from '../../../src/cmap/events';

-async function abortTransactionOperation(
-  entities: EntitiesMap,
-  op: uni.OperationDescription
-): Promise {
-  throw new Error('not implemented.');
+interface OperationFunctionParams {
+  client: MongoClient;
+  operation: OperationDescription;
+  entities: EntitiesMap;
 }
-async function aggregateOperation(
-  entities: EntitiesMap,
-  op: uni.OperationDescription
-): Promise {
-  const dbOrCollection = entities.get(op.object) as Db | Collection;
+
+type RunOperationFn = (p: OperationFunctionParams) => Promise<Document | boolean | number | void>;
+export const operations = new Map<string, RunOperationFn>();
+
+operations.set('abortTransaction', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.object);
+  return session.abortTransaction();
+});
+
+operations.set('aggregate', async ({ entities, operation }) => {
+  const dbOrCollection = entities.get(operation.object) as Db | Collection;
   if (!(dbOrCollection instanceof Db || dbOrCollection instanceof Collection)) {
-    throw new Error(`Operation object '${op.object}' must be a db or collection`);
+    throw new Error(`Operation object '${operation.object}' must be a db or
collection`); } return dbOrCollection - .aggregate(op.arguments.pipeline, { - allowDiskUse: op.arguments.allowDiskUse, - batchSize: op.arguments.batchSize, - bypassDocumentValidation: op.arguments.bypassDocumentValidation, - maxTimeMS: op.arguments.maxTimeMS, - maxAwaitTimeMS: op.arguments.maxAwaitTimeMS, - collation: op.arguments.collation, - hint: op.arguments.hint, - out: op.arguments.out + .aggregate(operation.arguments.pipeline, { + allowDiskUse: operation.arguments.allowDiskUse, + batchSize: operation.arguments.batchSize, + bypassDocumentValidation: operation.arguments.bypassDocumentValidation, + maxTimeMS: operation.arguments.maxTimeMS, + maxAwaitTimeMS: operation.arguments.maxAwaitTimeMS, + collation: operation.arguments.collation, + hint: operation.arguments.hint, + out: operation.arguments.out }) .toArray(); -} -async function assertCollectionExistsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertCollectionNotExistsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertIndexExistsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertIndexNotExistsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertDifferentLsidOnLastTwoCommandsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSameLsidOnLastTwoCommandsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSessionDirtyOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSessionNotDirtyOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSessionPinnedOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSessionUnpinnedOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function assertSessionTransactionStateOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function bulkWriteOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.bulkWrite(op.arguments.requests); -} -async function commitTransactionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const session = entities.getEntity('session', op.object); +}); + +operations.set('assertCollectionExists', async ({ operation, client }) => { + const collections = ( + await client + .db(operation.arguments.databaseName) + .listCollections({}, { nameOnly: true }) + .toArray() + ).map(({ name }) => name); + expect(collections).to.include(operation.arguments.collectionName); +}); + +operations.set('assertCollectionNotExists', async ({ operation, client }) => { + const collections = ( + await client + .db(operation.arguments.databaseName) + .listCollections({}, { nameOnly: true }) + 
.toArray()
+  ).map(({ name }) => name);
+  expect(collections).to.not.include(operation.arguments.collectionName);
+});
+
+operations.set('assertIndexExists', async ({ operation, client }) => {
+  const collection = client
+    .db(operation.arguments.databaseName)
+    .collection(operation.arguments.collectionName);
+  const indexes = (await collection.listIndexes().toArray()).map(({ name }) => name);
+  expect(indexes).to.include(operation.arguments.indexName);
+});
+
+operations.set('assertIndexNotExists', async ({ operation, client }) => {
+  const collection = client
+    .db(operation.arguments.databaseName)
+    .collection(operation.arguments.collectionName);
+  try {
+    expect(await collection.indexExists(operation.arguments.indexName)).to.be.false;
+  } catch (error) {
+    if (error.code === 26 || error.message.includes('ns does not exist')) {
+      return;
+    }
+    throw error;
+  }
+});
+
+operations.set('assertDifferentLsidOnLastTwoCommands', async ({ entities, operation }) => {
+  const client = entities.getEntity('client', operation.arguments.client);
+  expect(client.observedEvents.includes('commandStarted')).to.be.true;
+
+  const startedEvents = client.events.filter(
+    ev => ev instanceof CommandStartedEvent
+  ) as CommandStartedEvent[];
+
+  expect(startedEvents).to.have.length.gte(2);
+
+  const last = startedEvents[startedEvents.length - 1];
+  const secondLast = startedEvents[startedEvents.length - 2];
+
+  expect(last.command).to.have.property('lsid');
+  expect(secondLast.command).to.have.property('lsid');
+
+  expect(last.command.lsid.id.buffer.equals(secondLast.command.lsid.id.buffer)).to.be.false;
+});
+
+operations.set('assertSameLsidOnLastTwoCommands', async ({ entities, operation }) => {
+  const client = entities.getEntity('client', operation.arguments.client);
+  expect(client.observedEvents.includes('commandStarted')).to.be.true;
+
+  const startedEvents = client.events.filter(
+    ev => ev instanceof CommandStartedEvent
+  ) as CommandStartedEvent[];
+
+  expect(startedEvents).to.have.length.gte(2);
+
+  const last = startedEvents[startedEvents.length - 1];
+  const secondLast = startedEvents[startedEvents.length - 2];
+
+  expect(last.command).to.have.property('lsid');
+  expect(secondLast.command).to.have.property('lsid');
+
+  expect(last.command.lsid.id.buffer.equals(secondLast.command.lsid.id.buffer)).to.be.true;
+});
+
+operations.set('assertSessionDirty', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+  expect(session.serverSession.isDirty).to.be.true;
+});
+
+operations.set('assertSessionNotDirty', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+  expect(session.serverSession.isDirty).to.be.false;
+});
+
+operations.set('assertSessionPinned', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+  expect(session.transaction.isPinned).to.be.true;
+});
+
+operations.set('assertSessionUnpinned', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+  expect(session.transaction.isPinned).to.be.false;
+});
+
+operations.set('assertSessionTransactionState', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+
+  const transactionStateTranslation = {
+    none: 'NO_TRANSACTION',
+    starting: 'STARTING_TRANSACTION',
+    in_progress: 'TRANSACTION_IN_PROGRESS',
+    committed: 'TRANSACTION_COMMITTED',
+ aborted: 'TRANSACTION_ABORTED' + }; + + const driverTransactionStateName = transactionStateTranslation[operation.arguments.state]; + expect(session.transaction.state).to.equal(driverTransactionStateName); +}); + +operations.set('bulkWrite', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.bulkWrite(operation.arguments.requests); +}); + +operations.set('commitTransaction', async ({ entities, operation }) => { + const session = entities.getEntity('session', operation.object); return session.commitTransaction(); -} -async function createChangeStreamOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const watchable = entities.get(op.object); +}); + +operations.set('createChangeStream', async ({ entities, operation }) => { + const watchable = entities.get(operation.object); if (!('watch' in watchable)) { - throw new Error(`Entity ${op.object} must be watchable`); + throw new Error(`Entity ${operation.object} must be watchable`); } - const changeStream = watchable.watch(op.arguments.pipeline, { - fullDocument: op.arguments.fullDocument, - maxAwaitTimeMS: op.arguments.maxAwaitTimeMS, - resumeAfter: op.arguments.resumeAfter, - startAfter: op.arguments.startAfter, - startAtOperationTime: op.arguments.startAtOperationTime, - batchSize: op.arguments.batchSize + const changeStream = watchable.watch(operation.arguments.pipeline, { + fullDocument: operation.arguments.fullDocument, + maxAwaitTimeMS: operation.arguments.maxAwaitTimeMS, + resumeAfter: operation.arguments.resumeAfter, + startAfter: operation.arguments.startAfter, + startAtOperationTime: operation.arguments.startAtOperationTime, + batchSize: operation.arguments.batchSize }); changeStream.eventCollector = new EventCollector(changeStream, ['init', 'change', 'error']); @@ -143,281 +194,221 @@ async function createChangeStreamOperation( resolve(changeStream); }); }); -} -async function createCollectionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function createIndexOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function deleteOneOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.deleteOne(op.arguments.filter); -} -async function dropCollectionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function endSessionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function findOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - const { filter, sort, batchSize, limit } = op.arguments; +}); + +operations.set('createCollection', async ({ entities, operation }) => { + const db = entities.getEntity('db', operation.object); + const session = entities.getEntity('session', operation.arguments.session, false); + await db.createCollection(operation.arguments.collection, { session }); +}); + +operations.set('createIndex', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + const session = entities.getEntity('session', operation.arguments.session, false); + 
await collection.createIndex(operation.arguments.keys, { + session, + name: operation.arguments.name + }); +}); + +operations.set('deleteOne', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.deleteOne(operation.arguments.filter); +}); + +operations.set('dropCollection', async ({ entities, operation }) => { + const db = entities.getEntity('db', operation.object); + return await db.dropCollection(operation.arguments.collection); +}); + +operations.set('endSession', async ({ entities, operation }) => { + const session = entities.getEntity('session', operation.object); + return session.endSession(); +}); + +operations.set('find', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + const { filter, sort, batchSize, limit } = operation.arguments; return collection.find(filter, { sort, batchSize, limit }).toArray(); -} -async function findOneAndReplaceOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.findOneAndReplace(op.arguments.filter, op.arguments.replacement); -} -async function findOneAndUpdateOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - const returnOriginal = op.arguments.returnDocument === 'Before'; +}); + +operations.set('findOneAndReplace', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.findOneAndReplace(operation.arguments.filter, operation.arguments.replacement); +}); + +operations.set('findOneAndUpdate', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + const returnOriginal = operation.arguments.returnDocument === 'Before'; return ( - await collection.findOneAndUpdate(op.arguments.filter, op.arguments.update, { returnOriginal }) + await collection.findOneAndUpdate(operation.arguments.filter, operation.arguments.update, { + returnOriginal + }) ).value; -} -async function failPointOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const client = entities.getEntity('client', op.arguments.client); - return client.enableFailPoint(op.arguments.failPoint); -} -async function insertOneOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); +}); + +operations.set('failPoint', async ({ entities, operation }) => { + const client = entities.getEntity('client', operation.arguments.client); + return client.enableFailPoint(operation.arguments.failPoint); +}); - const session = entities.getEntity('session', op.arguments.session, false); +operations.set('insertOne', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + + const session = entities.getEntity('session', operation.arguments.session, false); const options = { session } as InsertOneOptions; - return collection.insertOne(op.arguments.document, options); -} -async function insertManyOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); + return collection.insertOne(operation.arguments.document, options); +}); - const session = entities.getEntity('session', 
op.arguments.session, false); +operations.set('insertMany', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + + const session = entities.getEntity('session', operation.arguments.session, false); const options = { session, - ordered: op.arguments.ordered ?? true + ordered: operation.arguments.ordered ?? true }; - return collection.insertMany(op.arguments.documents, options); -} -async function iterateUntilDocumentOrErrorOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const changeStream = entities.getEntity('stream', op.object); + return collection.insertMany(operation.arguments.documents, options); +}); + +operations.set('iterateUntilDocumentOrError', async ({ entities, operation }) => { + const changeStream = entities.getEntity('stream', operation.object); // Either change or error promise will finish return Promise.race([ changeStream.eventCollector.waitAndShiftEvent('change'), changeStream.eventCollector.waitAndShiftEvent('error') ]); -} -async function listDatabasesOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function replaceOneOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.replaceOne(op.arguments.filter, op.arguments.replacement, { - bypassDocumentValidation: op.arguments.bypassDocumentValidation, - collation: op.arguments.collation, - hint: op.arguments.hint, - upsert: op.arguments.upsert +}); + +operations.set('listDatabases', async ({ entities, operation }) => { + const client = entities.getEntity('client', operation.object); + return client.db().admin().listDatabases(); +}); + +operations.set('replaceOne', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.replaceOne(operation.arguments.filter, operation.arguments.replacement, { + bypassDocumentValidation: operation.arguments.bypassDocumentValidation, + collation: operation.arguments.collation, + hint: operation.arguments.hint, + upsert: operation.arguments.upsert }); -} -async function startTransactionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const session = entities.getEntity('session', op.object); +}); + +operations.set('startTransaction', async ({ entities, operation }) => { + const session = entities.getEntity('session', operation.object); session.startTransaction(); -} -async function targetedFailPointOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function deleteOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function downloadOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function uploadOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function withTransactionOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - throw new Error('not implemented.'); -} -async function countDocumentsOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return 
collection.countDocuments(op.arguments.filter as Document);
-}
-async function deleteManyOperation(
-  entities: EntitiesMap,
-  op: uni.OperationDescription
-): Promise {
-  const collection = entities.getEntity('collection', op.object);
-  return collection.deleteMany(op.arguments.filter);
-}
-async function distinctOperation(
-  entities: EntitiesMap,
-  op: uni.OperationDescription
-): Promise {
-  const collection = entities.getEntity('collection', op.object);
-  return collection.distinct(op.arguments.fieldName as string, op.arguments.filter as Document);
-}
-async function estimatedDocumentCountOperation(
-  entities: EntitiesMap,
-  op: uni.OperationDescription
-): Promise {
-  const collection = entities.getEntity('collection', op.object);
+});
+
+operations.set('targetedFailPoint', async ({ entities, operation }) => {
+  const session = entities.getEntity('session', operation.arguments.session);
+  expect(session.transaction.isPinned, 'Session must be pinned for a targetedFailPoint').to.be.true;
+  const client = session.client;
+  return client.enableFailPoint(operation.arguments.failPoint);
+});
+
+operations.set('delete', async ({ entities, operation }) => {
+  const bucket = entities.getEntity('bucket', operation.object);
+  return bucket.delete(operation.arguments.id);
+});
+
+operations.set('download', async ({ entities, operation }) => {
+  const bucket = entities.getEntity('bucket', operation.object);
+
+  const stream = bucket.openDownloadStream(operation.arguments.id);
+  return new Promise((resolve, reject) => {
+    const chunks = [];
+    stream.on('data', chunk => chunks.push(...chunk));
+    stream.on('error', reject);
+    stream.on('end', () => resolve(chunks));
+  });
+});
+
+operations.set('upload', async ({ entities, operation }) => {
+  const bucket = entities.getEntity('bucket', operation.object);
+
+  const stream = bucket.openUploadStream(operation.arguments.filename, {
+    chunkSizeBytes: operation.arguments.chunkSizeBytes
+  });
+
+  return new Promise((resolve, reject) => {
+    stream.end(Buffer.from(operation.arguments.source.$$hexBytes, 'hex'), (error, file) => {
+      if (error) reject(error);
+      resolve((file as GridFSFile)._id as ObjectId);
+    });
+  });
+});
+
+operations.set('withTransaction', async ({ entities, operation, client }) => {
+  const session = entities.getEntity('session', operation.object);
+
+  const options = {
+    readConcern: ReadConcern.fromOptions(operation.arguments),
+    writeConcern: WriteConcern.fromOptions(operation.arguments),
+    readPreference: ReadPreference.fromOptions(operation.arguments),
+    maxCommitTimeMS: operation.arguments.maxCommitTimeMS
+  };
+
+  return session.withTransaction(async () => {
+    for (const callbackOperation of operation.arguments.callback) {
+      await executeOperationAndCheck(callbackOperation, entities, client);
+    }
+  }, options);
+});
+
+operations.set('countDocuments', async ({ entities, operation }) => {
+  const collection = entities.getEntity('collection', operation.object);
+  return collection.countDocuments(operation.arguments.filter as Document);
+});
+
+operations.set('deleteMany', async ({ entities, operation }) => {
+  const collection = entities.getEntity('collection', operation.object);
+  return collection.deleteMany(operation.arguments.filter);
+});
+
+operations.set('distinct', async ({ entities, operation }) => {
+  const collection = entities.getEntity('collection', operation.object);
+  return collection.distinct(
+    operation.arguments.fieldName as string,
+    operation.arguments.filter as Document
+  );
+});
+
+operations.set('estimatedDocumentCount', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); return collection.estimatedDocumentCount(); -} -async function findOneAndDeleteOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.findOneAndDelete(op.arguments.filter); -} -async function runCommandOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const db = entities.getEntity('db', op.object); - return db.command(op.arguments.command); -} -async function updateManyOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.updateMany(op.arguments.filter, op.arguments.update); -} -async function updateOneOperation( - entities: EntitiesMap, - op: uni.OperationDescription -): Promise { - const collection = entities.getEntity('collection', op.object); - return collection.updateOne(op.arguments.filter, op.arguments.update); -} +}); -type RunOperationFn = ( - entities: EntitiesMap, - op: uni.OperationDescription -) => Promise; -export const operations = new Map(); +operations.set('findOneAndDelete', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.findOneAndDelete(operation.arguments.filter); +}); + +operations.set('runCommand', async ({ entities, operation }: OperationFunctionParams) => { + const db = entities.getEntity('db', operation.object); + return db.command(operation.arguments.command); +}); -operations.set('abortTransaction', abortTransactionOperation); -operations.set('aggregate', aggregateOperation); -operations.set('assertCollectionExists', assertCollectionExistsOperation); -operations.set('assertCollectionNotExists', assertCollectionNotExistsOperation); -operations.set('assertIndexExists', assertIndexExistsOperation); -operations.set('assertIndexNotExists', assertIndexNotExistsOperation); -operations.set( - 'assertDifferentLsidOnLastTwoCommands', - assertDifferentLsidOnLastTwoCommandsOperation -); -operations.set('assertSameLsidOnLastTwoCommands', assertSameLsidOnLastTwoCommandsOperation); -operations.set('assertSessionDirty', assertSessionDirtyOperation); -operations.set('assertSessionNotDirty', assertSessionNotDirtyOperation); -operations.set('assertSessionPinned', assertSessionPinnedOperation); -operations.set('assertSessionUnpinned', assertSessionUnpinnedOperation); -operations.set('assertSessionTransactionState', assertSessionTransactionStateOperation); -operations.set('bulkWrite', bulkWriteOperation); -operations.set('commitTransaction', commitTransactionOperation); -operations.set('createChangeStream', createChangeStreamOperation); -operations.set('createCollection', createCollectionOperation); -operations.set('createIndex', createIndexOperation); -operations.set('deleteOne', deleteOneOperation); -operations.set('dropCollection', dropCollectionOperation); -operations.set('endSession', endSessionOperation); -operations.set('find', findOperation); -operations.set('findOneAndReplace', findOneAndReplaceOperation); -operations.set('findOneAndUpdate', findOneAndUpdateOperation); -operations.set('failPoint', failPointOperation); -operations.set('insertOne', insertOneOperation); -operations.set('insertMany', insertManyOperation); -operations.set('iterateUntilDocumentOrError', 
iterateUntilDocumentOrErrorOperation); -operations.set('listDatabases', listDatabasesOperation); -operations.set('replaceOne', replaceOneOperation); -operations.set('startTransaction', startTransactionOperation); -operations.set('targetedFailPoint', targetedFailPointOperation); -operations.set('delete', deleteOperation); -operations.set('download', downloadOperation); -operations.set('upload', uploadOperation); -operations.set('withTransaction', withTransactionOperation); - -// Versioned API adds these: -operations.set('countDocuments', countDocumentsOperation); -operations.set('deleteMany', deleteManyOperation); -operations.set('distinct', distinctOperation); -operations.set('estimatedDocumentCount', estimatedDocumentCountOperation); -operations.set('findOneAndDelete', findOneAndDeleteOperation); -operations.set('runCommand', runCommandOperation); -operations.set('updateMany', updateManyOperation); -operations.set('updateOne', updateOneOperation); +operations.set('updateMany', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.updateMany(operation.arguments.filter, operation.arguments.update); +}); + +operations.set('updateOne', async ({ entities, operation }) => { + const collection = entities.getEntity('collection', operation.object); + return collection.updateOne(operation.arguments.filter, operation.arguments.update); +}); export async function executeOperationAndCheck( - operation: uni.OperationDescription, - entities: EntitiesMap + operation: OperationDescription, + entities: EntitiesMap, + client: MongoClient ): Promise { const opFunc = operations.get(operation.name); expect(opFunc, `Unknown operation: ${operation.name}`).to.exist; @@ -425,18 +416,14 @@ export async function executeOperationAndCheck( let result; try { - result = await opFunc(entities, operation); + result = await opFunc({ entities, operation, client }); } catch (error) { - // FIXME: Remove when project is done: - if (error.message === 'not implemented.') { - throw error; - } if (operation.expectError) { expectErrorCheck(error, operation.expectError, entities); + return; } else { - expect.fail(`Operation ${operation.name} failed with ${error.message}`); + throw error; } - return; } // We check the positive outcome here so the try-catch above doesn't catch our chai assertions @@ -446,7 +433,7 @@ export async function executeOperationAndCheck( } if (operation.expectResult) { - expect(expectResultCheck(result, operation.expectResult, entities)).to.be.true; + resultCheck(result, operation.expectResult, entities); } if (operation.saveResultAsEntity) { diff --git a/test/functional/unified-spec-runner/runner.ts b/test/functional/unified-spec-runner/runner.ts new file mode 100644 index 00000000000..d4dfc378f6a --- /dev/null +++ b/test/functional/unified-spec-runner/runner.ts @@ -0,0 +1,203 @@ +import { expect } from 'chai'; +import { ReadPreference } from '../../../src/read_preference'; +import * as uni from './schema'; +import { zip, topologySatisfies, patchVersion } from './unified-utils'; +import { CommandEvent, EntitiesMap } from './entities'; +import { ns } from '../../../src/utils'; +import { executeOperationAndCheck } from './operations'; +import { matchesEvents } from './match'; +import { satisfies as semverSatisfies } from 'semver'; + +export type TestConfiguration = InstanceType< + typeof import('../../tools/runner/config')['TestConfiguration'] +>; +interface MongoDBMochaTestContext extends Mocha.Context { + configuration: 
TestConfiguration; +} + +const SKIPPED_TESTS = [ + // These were already skipped in our existing spec tests + 'unpin after transient error within a transaction and commit', + 'Dirty explicit session is discarded', + + // TODO Un-skip these to complete unified runner + 'withTransaction inherits transaction options from client', + 'withTransaction inherits transaction options from defaultTransactionOptions', + 'remain pinned after non-transient Interrupted error on insertOne', + 'unpin after transient error within a transaction', + 'Client side error in command starting transaction', + 'explicitly create collection using create command', + 'create index on a non-existing collection', + 'InsertMany succeeds after PrimarySteppedDown', + 'withTransaction and no transaction options set', + 'withTransaction explicit transaction options', + 'InsertOne fails after connection failure when retryWrites option is false', + 'InsertOne fails after multiple retryable writeConcernErrors' +]; + +export async function runUnifiedTest( + ctx: MongoDBMochaTestContext, + unifiedSuite: uni.UnifiedSuite, + test: uni.Test +): Promise { + // Some basic expectations we can catch early + expect(test).to.exist; + expect(unifiedSuite).to.exist; + expect(ctx).to.exist; + expect(ctx.configuration).to.exist; + + const schemaVersion = patchVersion(unifiedSuite.schemaVersion); + expect(semverSatisfies(schemaVersion, uni.SupportedVersion)).to.be.true; + + // If test.skipReason is specified, the test runner MUST skip this + // test and MAY use the string value to log a message. + if (test.skipReason) { + console.warn(`Skipping test ${test.description}: ${test.skipReason}.`); + ctx.skip(); + } + + if (SKIPPED_TESTS.includes(test.description)) { + ctx.skip(); + } + + const utilClient = ctx.configuration.newClient(); + + let entities; + try { + await utilClient.connect(); + + // Must fetch parameters before checking runOnRequirements + ctx.configuration.parameters = await utilClient.db().admin().command({ getParameter: '*' }); + + // If test.runOnRequirements is specified, the test runner MUST skip the test unless one or more + // runOnRequirement objects are satisfied. + const allRequirements = [ + ...(unifiedSuite.runOnRequirements ?? []), + ...(test.runOnRequirements ?? []) + ]; + + let doesNotMeetRunOnRequirement = allRequirements.length > 0; + + for (const requirement of allRequirements) { + if (await topologySatisfies(ctx.configuration, requirement, utilClient)) { + doesNotMeetRunOnRequirement = false; // it does meet a run on requirement! + break; + } + } + + if (doesNotMeetRunOnRequirement) { + ctx.skip(); + } + + // If initialData is specified, for each collectionData therein the test runner MUST drop the + // collection and insert the specified documents (if any) using a "majority" write concern. If no + // documents are specified, the test runner MUST create the collection with a "majority" write concern. + // The test runner MUST use the internal MongoClient for these operations. 
+    if (unifiedSuite.initialData) {
+      for (const collData of unifiedSuite.initialData) {
+        const db = utilClient.db(collData.databaseName);
+        const collection = db.collection(collData.collectionName, {
+          writeConcern: { w: 'majority' }
+        });
+        const collectionList = await db
+          .listCollections({ name: collData.collectionName })
+          .toArray();
+        if (collectionList.length !== 0) {
+          expect(await collection.drop()).to.be.true;
+        }
+      }
+
+      for (const collData of unifiedSuite.initialData) {
+        const db = utilClient.db(collData.databaseName);
+        const collection = db.collection(collData.collectionName, {
+          writeConcern: { w: 'majority' }
+        });
+
+        if (!collData.documents?.length) {
+          await db.createCollection(collData.collectionName, {
+            writeConcern: { w: 'majority' }
+          });
+          continue;
+        }
+
+        await collection.insertMany(collData.documents);
+      }
+    }
+
+    entities = await EntitiesMap.createEntities(ctx.configuration, unifiedSuite.createEntities);
+
+    // Workaround for SERVER-39704:
+    // test runners MUST execute a non-transactional distinct command on
+    // each mongos server before running any test that might execute distinct within a transaction.
+    // To ease the implementation, test runners MAY execute distinct before every test.
+    if (
+      ctx.topologyType === uni.TopologyType.sharded ||
+      ctx.topologyType === uni.TopologyType.shardedReplicaset
+    ) {
+      for (const [, collection] of entities.mapOf('collection')) {
+        await utilClient.db(ns(collection.namespace).db).command({
+          distinct: collection.collectionName,
+          key: '_id'
+        });
+      }
+    }
+
+    for (const operation of test.operations) {
+      await executeOperationAndCheck(operation, entities, utilClient);
+    }
+
+    const clientEvents = new Map<string, CommandEvent[]>();
+    // If any event listeners were enabled on any client entities,
+    // the test runner MUST now disable those event listeners.
+ for (const [id, client] of entities.mapOf('client')) { + clientEvents.set(id, client.stopCapturingEvents()); + } + + if (test.expectEvents) { + for (const expectedEventList of test.expectEvents) { + const clientId = expectedEventList.client; + const actualEvents = clientEvents.get(clientId); + + expect(actualEvents, `No client entity found with id ${clientId}`).to.exist; + matchesEvents(expectedEventList.events, actualEvents, entities); + } + } + + if (test.outcome) { + for (const collectionData of test.outcome) { + const collection = utilClient + .db(collectionData.databaseName) + .collection(collectionData.collectionName); + const findOpts = { + readConcern: 'local' as const, + readPreference: ReadPreference.primary, + sort: { _id: 'asc' as const } + }; + const documents = await collection.find({}, findOpts).toArray(); + + expect(documents).to.have.lengthOf(collectionData.documents.length); + for (const [expected, actual] of zip(collectionData.documents, documents)) { + expect(actual).to.include(expected, 'Test outcome did not match expected'); + } + } + } + } finally { + await utilClient.close(); + await entities?.cleanup(); + } +} + +export function runUnifiedSuite(specTests: uni.UnifiedSuite[]): void { + for (const unifiedSuite of specTests) { + context(String(unifiedSuite.description), function () { + for (const test of unifiedSuite.tests) { + it(String(test.description), { + metadata: { sessions: { skipLeakTests: true } }, + test: async function () { + await runUnifiedTest(this, unifiedSuite, test); + } + }); + } + }); + } +} diff --git a/test/functional/unified-spec-runner/unified-runner.test.ts b/test/functional/unified-spec-runner/unified-runner.test.ts new file mode 100644 index 00000000000..da4206e0064 --- /dev/null +++ b/test/functional/unified-spec-runner/unified-runner.test.ts @@ -0,0 +1,17 @@ +import { loadSpecTests } from '../../spec/index'; +import { runUnifiedSuite } from './runner'; + +describe('Unified test format runner', function unifiedTestRunner() { + // Valid tests that should pass + runUnifiedSuite(loadSpecTests('unified-test-format/valid-pass')); + + // Valid tests that should fail + // for (const unifiedSuite of loadSpecTests('unified-test-format/valid-fail')) { + // // TODO + // } + + // Tests that are invalid, would be good to gracefully fail on + // for (const unifiedSuite of loadSpecTests('unified-test-format/invalid')) { + // // TODO + // } +}); diff --git a/test/functional/unified-spec-runner/unified-utils.ts b/test/functional/unified-spec-runner/unified-utils.ts index 6abb2148bb9..e55ea468812 100644 --- a/test/functional/unified-spec-runner/unified-utils.ts +++ b/test/functional/unified-spec-runner/unified-utils.ts @@ -1,16 +1,20 @@ import { expect } from 'chai'; import type { CollectionOrDatabaseOptions, RunOnRequirement } from './schema'; -import type { TestConfiguration } from './unified.test'; import { gte as semverGte, lte as semverLte } from 'semver'; -import { CollectionOptions, DbOptions } from '../../../src'; +import { CollectionOptions, DbOptions, MongoClient } from '../../../src'; import { isDeepStrictEqual } from 'util'; +import { TestConfiguration } from './runner'; const ENABLE_UNIFIED_TEST_LOGGING = false; export function log(message: unknown, ...optionalParameters: unknown[]): void { if (ENABLE_UNIFIED_TEST_LOGGING) console.warn(message, ...optionalParameters); } -export function topologySatisfies(config: TestConfiguration, r: RunOnRequirement): boolean { +export async function topologySatisfies( + config: TestConfiguration, + r: 
RunOnRequirement,
+  utilClient: MongoClient
+): Promise<boolean> {
   let ok = true;
   if (r.minServerVersion) {
     const minVersion = patchVersion(r.minServerVersion);
@@ -28,8 +32,14 @@ export function topologySatisfies(config: TestConfiguration, r: RunOnRequirement
       ReplicaSetWithPrimary: 'replicaset',
       Sharded: 'sharded'
     }[config.topologyType];
-    if (!topologyType) throw new Error(`Topology undiscovered: ${config.topologyType}`);
-    ok &&= r.topologies.includes(topologyType);
+
+    if (r.topologies.includes('sharded-replicaset')) {
+      const shards = await utilClient.db('config').collection('shards').find({}).toArray();
+      ok &&= shards.length > 0 && shards.every(shard => shard.host.split(',').length > 1);
+    } else {
+      if (!topologyType) throw new Error(`Topology undiscovered: ${config.topologyType}`);
+      ok &&= r.topologies.includes(topologyType);
+    }
   }

   if (r.serverParameters) {
diff --git a/test/functional/unified-spec-runner/unified.test.ts b/test/functional/unified-spec-runner/unified.test.ts
deleted file mode 100644
index 2dc973d2105..00000000000
--- a/test/functional/unified-spec-runner/unified.test.ts
+++ /dev/null
@@ -1,175 +0,0 @@
-import { expect } from 'chai';
-import { ReadPreference } from '../../../src/read_preference';
-import { loadSpecTests } from '../../spec/index';
-import * as uni from './schema';
-import { patchVersion, zip, log, topologySatisfies } from './unified-utils';
-import { CommandEvent, EntitiesMap } from './entities';
-import { ns } from '../../../src/utils';
-import { executeOperationAndCheck } from './operations';
-import { satisfies as semverSatisfies } from 'semver';
-import { matchesEvents } from './match';
-
-export type TestConfiguration = InstanceType<
-  typeof import('../../tools/runner/config')['TestConfiguration']
->;
-interface MongoDBMochaTestContext extends Mocha.Context {
-  configuration: TestConfiguration;
-}
-
-async function runOne(
-  ctx: MongoDBMochaTestContext,
-  unifiedSuite: uni.UnifiedSuite,
-  test: uni.Test
-) {
-  // Some basic expectations we can catch early
-  expect(test).to.exist;
-  expect(unifiedSuite).to.exist;
-  expect(ctx).to.exist;
-  expect(ctx.configuration).to.exist;
-
-  // If test.skipReason is specified, the test runner MUST skip this
-  // test and MAY use the string value to log a message.
-  if (test.skipReason) {
-    console.warn(`Skipping test ${test.description}: ${test.skipReason}.`);
-    ctx.skip();
-  }
-
-  const UTIL_CLIENT = ctx.configuration.newClient();
-  await UTIL_CLIENT.connect();
-  ctx.defer(async () => await UTIL_CLIENT.close());
-
-  // Must fetch parameters before checking runOnRequirements
-  ctx.configuration.parameters = await UTIL_CLIENT.db().admin().command({ getParameter: '*' });
-
-  // If test.runOnRequirements is specified, the test runner MUST skip the test unless one or more
-  // runOnRequirement objects are satisfied.
-  const allRequirements = [
-    ...(unifiedSuite.runOnRequirements ?? []),
-    ...(test.runOnRequirements ?? [])
-  ];
-  for (const requirement of allRequirements) {
-    if (!topologySatisfies(ctx.configuration, requirement)) {
-      ctx.skip();
-    }
-  }
-
-  // If initialData is specified, for each collectionData therein the test runner MUST drop the
-  // collection and insert the specified documents (if any) using a "majority" write concern. If no
-  // documents are specified, the test runner MUST create the collection with a "majority" write concern.
-  // The test runner MUST use the internal MongoClient for these operations.
-  if (unifiedSuite.initialData) {
-    for (const collData of unifiedSuite.initialData) {
-      const db = UTIL_CLIENT.db(collData.databaseName);
-      const collection = db.collection(collData.collectionName, {
-        writeConcern: { w: 'majority' }
-      });
-      const collectionList = await db.listCollections({ name: collData.collectionName }).toArray();
-      if (collectionList.length !== 0) {
-        expect(await collection.drop()).to.be.true;
-      }
-
-      if (collData.documents.length === 0) {
-        await db.createCollection(collData.collectionName, {
-          writeConcern: { w: 'majority' }
-        });
-        continue;
-      }
-
-      await collection.insertMany(collData.documents);
-    }
-  }
-
-  const entities = await EntitiesMap.createEntities(ctx.configuration, unifiedSuite.createEntities);
-  ctx.defer(async () => await entities.cleanup());
-
-  // Workaround for SERVER-39704:
-  // test runners MUST execute a non-transactional distinct command on
-  // each mongos server before running any test that might execute distinct within a transaction.
-  // To ease the implementation, test runners MAY execute distinct before every test.
-  if (
-    ctx.topologyType === uni.TopologyType.sharded ||
-    ctx.topologyType === uni.TopologyType.shardedReplicaset
-  ) {
-    for (const [, collection] of entities.mapOf('collection')) {
-      await UTIL_CLIENT.db(ns(collection.namespace).db).command({
-        distinct: collection.collectionName,
-        key: '_id'
-      });
-    }
-  }
-
-  for (const operation of test.operations) {
-    await executeOperationAndCheck(operation, entities);
-  }
-
-  const clientEvents = new Map<string, CommandEvent[]>();
-  // If any event listeners were enabled on any client entities,
-  // the test runner MUST now disable those event listeners.
-  for (const [id, client] of entities.mapOf('client')) {
-    clientEvents.set(id, client.stopCapturingEvents());
-  }
-
-  if (test.expectEvents) {
-    for (const expectedEventList of test.expectEvents) {
-      const clientId = expectedEventList.client;
-      const actualEvents = clientEvents.get(clientId);
-
-      expect(actualEvents, `No client entity found with id ${clientId}`).to.exist;
-      matchesEvents(expectedEventList.events, actualEvents, entities);
-    }
-  }
-
-  if (test.outcome) {
-    for (const collectionData of test.outcome) {
-      const collection = UTIL_CLIENT.db(collectionData.databaseName).collection(
-        collectionData.collectionName
-      );
-      const findOpts = {
-        readConcern: 'local' as const,
-        readPreference: ReadPreference.primary,
-        sort: { _id: 'asc' as const }
-      };
-      const documents = await collection.find({}, findOpts).toArray();
-
-      expect(documents).to.have.lengthOf(collectionData.documents.length);
-      for (const [expected, actual] of zip(collectionData.documents, documents)) {
-        expect(actual).to.include(expected, 'Test outcome did not match expected');
-      }
-    }
-  }
-}
-
-describe('Unified test format', function unifiedTestRunner() {
-  // Valid tests that should pass
-  for (const unifiedSuite of loadSpecTests('unified-test-format/valid-pass')) {
-    const schemaVersion = patchVersion(unifiedSuite.schemaVersion);
-    expect(semverSatisfies(schemaVersion, uni.SupportedVersion)).to.be.true;
-    context(String(unifiedSuite.description), function runUnifiedTest() {
-      for (const test of unifiedSuite.tests) {
-        it(String(test.description), async function runOneUnifiedTest() {
-          try {
-            await runOne(this as MongoDBMochaTestContext, unifiedSuite, test);
-          } catch (error) {
-            if (error.message.includes('not implemented.')) {
-              log(`${test.description}: was skipped due to missing functionality`);
-              log(error.stack);
-              this.skip();
-            } else {
-              throw error;
-            }
-          }
-        });
-      }
-    });
-  }
-
-  // Valid tests that should fail
-  // for (const unifiedSuite of loadSpecTests('unified-test-format/valid-fail')) {
-  //   // TODO
-  // }
-
-  // Tests that are invalid, would be good to gracefully fail on
-  // for (const unifiedSuite of loadSpecTests('unified-test-format/invalid')) {
-  //   // TODO
-  // }
-});
diff --git a/test/tools/runner/config.js b/test/tools/runner/config.js
index ddc1ae92752..15d171895af 100644
--- a/test/tools/runner/config.js
+++ b/test/tools/runner/config.js
@@ -1,7 +1,7 @@
 'use strict';
 const url = require('url');
 const qs = require('querystring');
-const util = require('util');
+const { expect } = require('chai');
 
 const { MongoClient } = require('../../../src/mongo_client');
 const { Topology } = require('../../../src/sdam/topology');
@@ -9,6 +9,18 @@ const { TopologyType } = require('../../../src/sdam/common');
 const { parseURI } = require('../../../src/connection_string');
 const { HostAddress } = require('../../../src/utils');
 
+/**
+ * @typedef {Object} UrlOptions
+ * @property {string} [db] - dbName override for the path section of the URI
+ * @property {string} [replicaSet] - replicaSet name override
+ * @property {string} [username] - Username for auth section
+ * @property {string} [password] - Password for auth section
+ * @property {string} [authMechanism] - authMechanism name
+ * @property {Record<string, any>} [authMechanismProperties] - additional options for the auth mechanism
+ * @property {string} [authSource] - authSource override in the searchParams of the URI
+ * @property {boolean} [useMultipleMongoses] - if set, concatenates all known HostAddresses in the URI
+ */
+
 /**
  * @param {Record<string, any>} obj
  */
@@ -168,75 +180,56 @@ class TestConfiguration {
     return new Topology(hosts, options);
   }
 
-  url(username, password, options) {
-    options = options || {};
-
-    const query = {};
-    if (this.options.replicaSet) {
-      Object.assign(query, { replicaSet: this.options.replicaSet });
-    }
+  /**
+   * Construct a connection URL using Node.js's WHATWG URL class, similar to how
+   * connection_string.ts works
+   *
+   * @param {UrlOptions} [options] - overrides and settings for URI generation
+   */
+  url(options) {
+    options = { db: this.options.db, replicaSet: this.options.replicaSet, ...options };
 
-    let multipleHosts;
-    if (this.options.hosts.length > 1) {
-      // NOTE: The only way to force a sharded topology with the driver is to duplicate
-      // the host entry. This will eventually be solved by autodetection.
-      if (this.topologyType === TopologyType.Sharded) {
-        const firstHost = this.options.hostAddresses[0];
-        multipleHosts = `${firstHost.host}:${firstHost.port}`;
-      } else {
-        multipleHosts = this.options.hostAddresses
-          .reduce((built, host) => {
-            built.push(typeof host.port === 'number' ? `${host.host}:${host.port}` : host.host);
-            return built;
-          }, [])
-          .join(',');
-      }
-    }
+    const FILLER_HOST = 'fillerHost';
 
-    /** @type {Record<string, any>} */
-    const urlObject = {
-      protocol: 'mongodb',
-      slashes: true,
-      pathname: `/${this.options.db}`,
-      query
-    };
+    const url = new URL(`mongodb://${FILLER_HOST}`);
 
-    if (multipleHosts) {
-      Object.assign(urlObject, { hostname: '%s' });
-    } else {
-      Object.assign(urlObject, {
-        hostname: this.options.host,
-        port: this.options.port
-      });
+    if (options.replicaSet) {
+      url.searchParams.append('replicaSet', options.replicaSet);
     }
 
-    if (username || password) {
-      urlObject.auth = password == null ? username : `${username}:${password}`;
+    url.pathname = `/${options.db}`;
+
+    if (options.username) url.username = options.username;
+    if (options.password) url.password = options.password;
 
-      if (options.authMechanism || this.options.authMechanism) {
-        Object.assign(query, {
-          authMechanism: options.authMechanism || this.options.authMechanism
-        });
+    if (options.username || options.password) {
+      if (options.authMechanism) {
+        url.searchParams.append('authMechanism', options.authMechanism);
       }
 
-      if (options.authMechanismProperties || this.options.authMechanismProperties) {
-        Object.assign(query, {
-          authMechanismProperties: convertToConnStringMap(
-            options.authMechanismProperties || this.options.authMechanismProperties
-          )
-        });
+      if (options.authMechanismProperties) {
+        url.searchParams.append(
+          'authMechanismProperties',
+          convertToConnStringMap(options.authMechanismProperties)
+        );
       }
 
       if (options.authSource) {
-        query.authSource = options.authSource;
+        url.searchParams.append('authSource', options.authSource);
       }
     }
 
-    if (multipleHosts) {
-      return util.format(url.format(urlObject), multipleHosts);
+    let actualHostsString;
+    if (options.useMultipleMongoses) {
+      expect(this.options.hostAddresses).to.have.length.greaterThan(1);
+      actualHostsString = this.options.hostAddresses.map(ha => ha.toString()).join(',');
+    } else {
+      actualHostsString = this.options.hostAddresses[0].toString();
    }
 
-    return url.format(urlObject);
+    const connectionString = url.toString().replace(FILLER_HOST, actualHostsString);
+
+    return connectionString;
+  }
 
   writeConcernMax() {
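For reference, below is a minimal standalone sketch (TypeScript) of the filler-host technique the new TestConfiguration#url() above relies on: WHATWG `new URL()` cannot parse a comma-separated multi-host string such as `localhost:27017,localhost:27018`, so the URL is assembled against a placeholder host and the real host list is substituted into the serialized string afterwards. The `buildUrl` helper and its trimmed-down options shape are illustrative assumptions for this sketch, not driver code.

// Hedged sketch of the filler-host technique; `buildUrl` is hypothetical.
interface UrlOptions {
  db?: string;
  replicaSet?: string;
  username?: string;
  password?: string;
  authSource?: string;
}

function buildUrl(hostAddresses: string[], options: UrlOptions = {}): string {
  const FILLER_HOST = 'fillerHost';
  // Build against a single placeholder host that the URL parser accepts.
  const url = new URL(`mongodb://${FILLER_HOST}`);

  url.pathname = `/${options.db ?? 'test'}`;
  if (options.replicaSet) url.searchParams.append('replicaSet', options.replicaSet);
  if (options.username) url.username = options.username;
  if (options.password) url.password = options.password;
  if ((options.username || options.password) && options.authSource) {
    url.searchParams.append('authSource', options.authSource);
  }

  // mongodb: is a non-special scheme, so the opaque placeholder host keeps its
  // exact casing through serialization and a plain string replace is safe.
  return url.toString().replace(FILLER_HOST, hostAddresses.join(','));
}

// buildUrl(['localhost:27017', 'localhost:27018'], {
//   username: 'user', password: 'pw', replicaSet: 'rs0'
// })
// => 'mongodb://user:pw@localhost:27017,localhost:27018/test?replicaSet=rs0'

The design point the sketch illustrates: rather than hand-formatting the auth and query sections (as the deleted url.format()-based code did), all escaping is delegated to the URL class, and only the one thing it cannot represent, the multi-host authority, is patched in as a final string substitution.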