diff --git a/test/commands/data/import/bulk.nut.ts b/test/commands/data/import/bulk.nut.ts index 22506088..119135ac 100644 --- a/test/commands/data/import/bulk.nut.ts +++ b/test/commands/data/import/bulk.nut.ts @@ -8,7 +8,7 @@ import path from 'node:path'; import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; import { expect } from 'chai'; import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; -import { generateAccountsCsv } from './resume.nut.js'; +import { generateAccountsCsv } from '../../../testUtil.js'; describe('data import bulk NUTs', () => { let session: TestSession; diff --git a/test/commands/data/import/resume.nut.ts b/test/commands/data/import/resume.nut.ts index 0b6a667b..918c598f 100644 --- a/test/commands/data/import/resume.nut.ts +++ b/test/commands/data/import/resume.nut.ts @@ -5,11 +5,10 @@ * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ import path from 'node:path'; -import { writeFile } from 'node:fs/promises'; -import { EOL } from 'node:os'; import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; import { expect } from 'chai'; import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; +import { generateAccountsCsv } from '../../../testUtil.js'; describe('data import resume NUTs', () => { let session: TestSession; @@ -82,24 +81,3 @@ describe('data import resume NUTs', () => { expect(importResumeResult.failedRecords).to.equal(0); }); }); - -/** - * Generates a CSV file with 10_000 account records to insert - * - * Each `Account.name` field has a unique timestamp for idempotent runs. 
- */ -export async function generateAccountsCsv(savePath: string): Promise<string> { - const id = Date.now(); - - let csv = 'NAME,TYPE,PHONE,WEBSITE' + EOL; - - for (let i = 1; i <= 10_000; i++) { - csv += `account ${id} #${i},Account,415-555-0000,http://www.accountImport${i}.com${EOL}`; - } - - const accountsCsv = path.join(savePath, 'bulkImportAccounts1.csv'); - - await writeFile(accountsCsv, csv); - - return accountsCsv; -} diff --git a/test/commands/data/update/bulk.nut.ts b/test/commands/data/update/bulk.nut.ts new file mode 100644 index 00000000..5ee87045 --- /dev/null +++ b/test/commands/data/update/bulk.nut.ts @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2024, salesforce.com, inc. + * All rights reserved. + * Licensed under the BSD 3-Clause license. + * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +import path from 'node:path'; +import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; +import { expect } from 'chai'; +import { Org } from '@salesforce/core'; +import { generateUpdatedCsv, generateAccountsCsv } from '../../../testUtil.js'; +import { DataUpdateBulkResult } from '../../../../src/commands/data/update/bulk.js'; +import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; + +describe('data update bulk NUTs', () => { + let session: TestSession; + + before(async () => { + session = await TestSession.create({ + scratchOrgs: [ + { + config: 'config/project-scratch-def.json', + setDefault: true, + }, + ], + project: { sourceDir: path.join('test', 'test-files', 'data-project') }, + devhubAuthStrategy: 'AUTO', + }); + }); + + after(async () => { + await session?.clean(); + }); + + it('should bulk update account records', async () => { + const csvFile = await generateAccountsCsv(session.dir); + + const result = execCmd( + `data import bulk --file ${csvFile} --sobject Account --wait 10 --json`, + { ensureExitCode: 0 } + ).jsonOutput?.result as DataImportBulkResult; 
+ + // TODO: set org username above like here: + // https://github.com/salesforcecli/cli-plugins-testkit/blob/main/SAMPLES.md#testing-with-multiple-scratch-orgs + const username = [...session.orgs.keys()][0]; + + const conn = ( + await Org.create({ + aliasOrUsername: username, + }) + ).getConnection(); + + const importJob = conn.bulk2.job('ingest', { + id: result.jobId, + }); + + const successfulIds = (await importJob.getSuccessfulResults()).map((r) => r.sf__Id); + + const updatedCsv = await generateUpdatedCsv( + csvFile, + successfulIds, + path.join(session.dir, 'data-project', 'updated.csv') + ); + + const dataUpdateResult = execCmd( + `data update bulk --file ${updatedCsv} --sobject account --wait 10 --json`, + { ensureExitCode: 0 } + ).jsonOutput?.result as DataUpdateBulkResult; + + expect(dataUpdateResult.processedRecords).to.equal(10_000); + expect(dataUpdateResult.successfulRecords).to.equal(10_000); + expect(dataUpdateResult.failedRecords).to.equal(0); + }); +}); diff --git a/test/commands/data/update/resume.nut.ts b/test/commands/data/update/resume.nut.ts new file mode 100644 index 00000000..238218ec --- /dev/null +++ b/test/commands/data/update/resume.nut.ts @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2024, salesforce.com, inc. + * All rights reserved. + * Licensed under the BSD 3-Clause license. 
+ * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +import path from 'node:path'; +import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit'; +import { expect } from 'chai'; +import { Org } from '@salesforce/core'; +import { generateUpdatedCsv, generateAccountsCsv } from '../../../testUtil.js'; +import { DataUpdateBulkResult } from '../../../../src/commands/data/update/bulk.js'; +import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js'; + +describe('data update resume NUTs', () => { + let session: TestSession; + + before(async () => { + session = await TestSession.create({ + scratchOrgs: [ + { + config: 'config/project-scratch-def.json', + setDefault: true, + }, + ], + project: { sourceDir: path.join('test', 'test-files', 'data-project') }, + devhubAuthStrategy: 'AUTO', + }); + }); + + after(async () => { + await session?.clean(); + }); + + it('should resume bulk update via --use-most-recent', async () => { + const csvFile = await generateAccountsCsv(session.dir); + + const result = execCmd( + `data import bulk --file ${csvFile} --sobject Account --wait 10 --json`, + { ensureExitCode: 0 } + ).jsonOutput?.result as DataImportBulkResult; + + // TODO: set org username above like here: + // https://github.com/salesforcecli/cli-plugins-testkit/blob/main/SAMPLES.md#testing-with-multiple-scratch-orgs + const username = [...session.orgs.keys()][0]; + + const conn = ( + await Org.create({ + aliasOrUsername: username, + }) + ).getConnection(); + + const importJob = conn.bulk2.job('ingest', { + id: result.jobId, + }); + + const successfulIds = (await importJob.getSuccessfulResults()).map((r) => r.sf__Id); + + const updatedCsv = await generateUpdatedCsv( + csvFile, + successfulIds, + path.join(session.dir, 'data-project', 'updated.csv') + ); + + const dataUpdateAsyncRes = execCmd( + `data update bulk --file ${updatedCsv} --sobject account --async --json`, + { 
ensureExitCode: 0 } + ).jsonOutput?.result as DataUpdateBulkResult; + + expect(dataUpdateAsyncRes.jobId).to.be.length(18); + + const dataUpdateResumeRes = execCmd( + `data update resume -i ${dataUpdateAsyncRes.jobId} --wait 10 --json`, + { ensureExitCode: 0 } + ).jsonOutput?.result as DataUpdateBulkResult; + + expect(dataUpdateResumeRes.processedRecords).to.equal(10_000); + expect(dataUpdateResumeRes.successfulRecords).to.equal(10_000); + expect(dataUpdateResumeRes.failedRecords).to.equal(0); + }); +}); diff --git a/test/testUtil.ts b/test/testUtil.ts index 42379450..25c996ee 100644 --- a/test/testUtil.ts +++ b/test/testUtil.ts @@ -4,12 +4,17 @@ * Licensed under the BSD 3-Clause license. * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ +import path from 'node:path'; import * as fs from 'node:fs'; +import { EOL } from 'node:os'; +import { writeFile } from 'node:fs/promises'; import { PassThrough, Writable } from 'node:stream'; import { pipeline } from 'node:stream/promises'; import { promisify } from 'node:util'; import { exec as execSync } from 'node:child_process'; import { Connection } from '@salesforce/core'; +import { Parser as csvParse } from 'csv-parse'; +import { stringify as csvStringify } from 'csv-stringify/sync'; /* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-unused-vars */ @@ -213,3 +218,60 @@ export async function validateJson(filePath: string, totalqty: number): Promise< expect(parseInt(lengthRes.stdout.trim(), 10)).equal(totalqty); } + +export async function generateUpdatedCsv(sourceCsv: string, ids: string[], savePath: string) { + const csvReadStream = fs.createReadStream(sourceCsv); + const modifiedRows: Array<{ NAME: string; ID?: string }> = []; + let counter = 0; + + await pipeline( + csvReadStream, + new csvParse({ columns: true, delimiter: ',' }), + new PassThrough({ + objectMode: true, + transform(row: { NAME: string; ID?: string }, _encoding, callback) { + row.ID = ids[counter]; 
+ const modifiedRow = { ID: row['ID'], ...row }; + modifiedRows.push(modifiedRow); + counter++; + callback(null, null); + }, + }), + // dummy writable + new Writable({ + write(_chunk, _encoding, callback) { + callback(); + }, + }) + ); + + await writeFile( + savePath, + csvStringify(modifiedRows, { + header: true, + }) + ); + + return savePath; +} + +/** + * Generates a CSV file with 10_000 account records to insert + * + * Each `Account.name` field has a unique timestamp for idempotent runs. + */ +export async function generateAccountsCsv(savePath: string): Promise { + const id = Date.now(); + + let csv = 'NAME,TYPE,PHONE,WEBSITE' + EOL; + + for (let i = 1; i <= 10_000; i++) { + csv += `account ${id} #${i},Account,415-555-0000,http://www.accountImport${i}.com${EOL}`; + } + + const accountsCsv = path.join(savePath, 'bulkImportAccounts1.csv'); + + await writeFile(accountsCsv, csv); + + return accountsCsv; +}