test: add bulk update NUT
cristiand391 committed Oct 22, 2024
1 parent a57409a commit 172c7d5
Showing 5 changed files with 218 additions and 24 deletions.
2 changes: 1 addition & 1 deletion test/commands/data/import/bulk.nut.ts
@@ -8,7 +8,7 @@ import path from 'node:path';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js';
import { generateAccountsCsv } from './resume.nut.js';
import { generateAccountsCsv } from '../../../testUtil.js';

describe('data import bulk NUTs', () => {
let session: TestSession;
24 changes: 1 addition & 23 deletions test/commands/data/import/resume.nut.ts
@@ -5,11 +5,10 @@
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import path from 'node:path';
import { writeFile } from 'node:fs/promises';
import { EOL } from 'node:os';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js';
import { generateAccountsCsv } from '../../../testUtil.js';

describe('data import resume NUTs', () => {
let session: TestSession;
@@ -82,24 +81,3 @@ describe('data import resume NUTs', () => {
expect(importResumeResult.failedRecords).to.equal(0);
});
});

/**
* Generates a CSV file with 10_000 account records to insert
*
* Each `Account.name` field has a unique timestamp for idempotent runs.
*/
export async function generateAccountsCsv(savePath: string): Promise<string> {
const id = Date.now();

let csv = 'NAME,TYPE,PHONE,WEBSITE' + EOL;

for (let i = 1; i <= 10_000; i++) {
csv += `account ${id} #${i},Account,415-555-0000,http://www.accountImport${i}.com${EOL}`;
}

const accountsCsv = path.join(savePath, 'bulkImportAccounts1.csv');

await writeFile(accountsCsv, csv);

return accountsCsv;
}
74 changes: 74 additions & 0 deletions test/commands/data/update/bulk.nut.ts
@@ -0,0 +1,74 @@
/*
* Copyright (c) 2024, salesforce.com, inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import path from 'node:path';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { Org } from '@salesforce/core';
import { generateUpdatedCsv, generateAccountsCsv } from '../../../testUtil.js';
import { DataUpdateBulkResult } from '../../../../src/commands/data/update/bulk.js';
import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js';

describe('data update bulk NUTs', () => {
let session: TestSession;

before(async () => {
session = await TestSession.create({
scratchOrgs: [
{
config: 'config/project-scratch-def.json',
setDefault: true,
},
],
project: { sourceDir: path.join('test', 'test-files', 'data-project') },
devhubAuthStrategy: 'AUTO',
});
});

after(async () => {
await session?.clean();
});

it('should bulk update account records', async () => {
const csvFile = await generateAccountsCsv(session.dir);

const result = execCmd<DataImportBulkResult>(
`data import bulk --file ${csvFile} --sobject Account --wait 10 --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataImportBulkResult;

// TODO: set org username above like here:
// https://github.com/salesforcecli/cli-plugins-testkit/blob/main/SAMPLES.md#testing-with-multiple-scratch-orgs
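// (Per the linked sample — not verified in this commit — the scratch org entry
// above can be given a known username/alias so the test could reference that org
// directly instead of taking the first key of session.orgs.)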
const username = [...session.orgs.keys()][0];

const conn = (
await Org.create({
aliasOrUsername: username,
})
).getConnection();

const importJob = conn.bulk2.job('ingest', {
id: result.jobId,
});

const successfulIds = (await importJob.getSuccessfulResults()).map((r) => r.sf__Id);

const updatedCsv = await generateUpdatedCsv(
csvFile,
successfulIds,
path.join(session.dir, 'data-project', 'updated.csv')
);

const dataUpdateResult = execCmd<DataUpdateBulkResult>(
`data update bulk --file ${updatedCsv} --sobject account --wait 10 --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataUpdateBulkResult;

expect(dataUpdateResult.processedRecords).to.equal(10_000);
expect(dataUpdateResult.successfulRecords).to.equal(10_000);
expect(dataUpdateResult.failedRecords).to.equal(0);
});
});
81 changes: 81 additions & 0 deletions test/commands/data/update/resume.nut.ts
@@ -0,0 +1,81 @@
/*
* Copyright (c) 2024, salesforce.com, inc.
* All rights reserved.
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import path from 'node:path';
import { execCmd, TestSession } from '@salesforce/cli-plugins-testkit';
import { expect } from 'chai';
import { Org } from '@salesforce/core';
import { generateUpdatedCsv, generateAccountsCsv } from '../../../testUtil.js';
import { DataUpdateBulkResult } from '../../../../src/commands/data/update/bulk.js';
import { DataImportBulkResult } from '../../../../src/commands/data/import/bulk.js';

describe('data update resume NUTs', () => {
let session: TestSession;

before(async () => {
session = await TestSession.create({
scratchOrgs: [
{
config: 'config/project-scratch-def.json',
setDefault: true,
},
],
project: { sourceDir: path.join('test', 'test-files', 'data-project') },
devhubAuthStrategy: 'AUTO',
});
});

after(async () => {
await session?.clean();
});

it('should resume bulk update via job ID', async () => {
const csvFile = await generateAccountsCsv(session.dir);

const result = execCmd<DataImportBulkResult>(
`data import bulk --file ${csvFile} --sobject Account --wait 10 --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataImportBulkResult;

// TODO: set org username above like here:
// https://github.com/salesforcecli/cli-plugins-testkit/blob/main/SAMPLES.md#testing-with-multiple-scratch-orgs
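// (Same assumption as in bulk.nut.ts: per the linked sample, the scratch org
// config could assign a username/alias for direct lookup here.)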
const username = [...session.orgs.keys()][0];

const conn = (
await Org.create({
aliasOrUsername: username,
})
).getConnection();

const importJob = conn.bulk2.job('ingest', {
id: result.jobId,
});

const successfulIds = (await importJob.getSuccessfulResults()).map((r) => r.sf__Id);

const updatedCsv = await generateUpdatedCsv(
csvFile,
successfulIds,
path.join(session.dir, 'data-project', 'updated.csv')
);

const dataUpdateAsyncRes = execCmd<DataUpdateBulkResult>(
`data update bulk --file ${updatedCsv} --sobject account --async --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataUpdateBulkResult;

expect(dataUpdateAsyncRes.jobId).to.be.length(18);

const dataUpdateResumeRes = execCmd<DataUpdateBulkResult>(
`data update resume -i ${dataUpdateAsyncRes.jobId} --wait 10 --json`,
{ ensureExitCode: 0 }
).jsonOutput?.result as DataUpdateBulkResult;

expect(dataUpdateResumeRes.processedRecords).to.equal(10_000);
expect(dataUpdateResumeRes.successfulRecords).to.equal(10_000);
expect(dataUpdateResumeRes.failedRecords).to.equal(0);
});
});
61 changes: 61 additions & 0 deletions test/testUtil.ts
@@ -4,12 +4,16 @@
* Licensed under the BSD 3-Clause license.
* For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import path from 'node:path';
import * as fs from 'node:fs';
import { EOL } from 'node:os';
import { writeFile } from 'node:fs/promises';
import { PassThrough, Writable } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import { promisify } from 'node:util';
import { exec as execSync } from 'node:child_process';
import { Connection } from '@salesforce/core';
import { stringify as csvStringify } from 'csv-stringify/sync';

/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unused-vars */
@@ -213,3 +217,60 @@ export async function validateJson(filePath: string, totalqty: number): Promise<

expect(parseInt(lengthRes.stdout.trim(), 10)).equal(totalqty);
}

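/**
 * Copies `sourceCsv` to `savePath`, prepending an `ID` column populated from `ids`
 * (one ID per row, in order) so the rows can be fed to a bulk update job.
 *
 * Returns the path of the generated CSV.
 */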
export async function generateUpdatedCsv(sourceCsv: string, ids: string[], savePath: string): Promise<string> {
const csvReadStream = fs.createReadStream(sourceCsv);
const modifiedRows: Array<{ NAME: string; ID?: string }> = [];
let counter = 0;

await pipeline(
csvReadStream,
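// NOTE: `csvParse` is assumed to be the csv-parse streaming parser (e.g.
// `import { parse as csvParse } from 'csv-parse'`); its import is not visible
// in this diff.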
new csvParse({ columns: true, delimiter: ',' }),
new PassThrough({
objectMode: true,
transform(row: { NAME: string; ID?: string }, _encoding, callback) {
row.ID = ids[counter];
const modifiedRow = { ID: row['ID'], ...row };
modifiedRows.push(modifiedRow);
counter++;
callback(null, null);
},
}),
// dummy writable
new Writable({
write(_chunk, _encoding, callback) {
callback();
},
})
);

await writeFile(
savePath,
csvStringify(modifiedRows, {
header: true,
})
);

return savePath;
}

/**
* Generates a CSV file with 10_000 account records to insert
*
* Each `Account.name` field has a unique timestamp for idempotent runs.
*/
export async function generateAccountsCsv(savePath: string): Promise<string> {
const id = Date.now();

let csv = 'NAME,TYPE,PHONE,WEBSITE' + EOL;

for (let i = 1; i <= 10_000; i++) {
csv += `account ${id} #${i},Account,415-555-0000,http://www.accountImport${i}.com${EOL}`;
}

const accountsCsv = path.join(savePath, 'bulkImportAccounts1.csv');

await writeFile(accountsCsv, csv);

return accountsCsv;
}
