diff --git a/demo/issues-esm/lib/411.csv b/demo/issues-esm/lib/411.csv
index f560e243..cab0a2a3 100644
--- a/demo/issues-esm/lib/411.csv
+++ b/demo/issues-esm/lib/411.csv
@@ -1,10 +1,4 @@
-id,amount,timestamp
-1,$33.60,05/26/2022
-2,$91.47,08/24/2022
-3,$88.70,06/15/2022
-4,$46.97,06/11/2022,faulty
-5,$93.78,05/05/2022
-6,$56.80,05/07/2022
-7,$3.54,05/12/2022
-8,$57.59,07/05/2022
-9,$76.49,05/02/2022
+id,first_name,last_name,email,modified_at
+1,Ring,Grinyov,rgrinyov0@weebly.com,2022-02-14
+2,Kylie,Lauderdale,klauderdale1@wsj.com,2022-02-14,
+3,Cammi,Bendix,cbendix2@tuttocitta.it,2022-02-14
diff --git a/demo/issues-esm/lib/411.js b/demo/issues-esm/lib/411.js
index 374b93ee..d57a83a1 100644
--- a/demo/issues-esm/lib/411.js
+++ b/demo/issues-esm/lib/411.js
@@ -1,27 +1,38 @@
-import assert from 'node:assert';
-import { createReadStream } from 'node:fs';
-import { Writable } from 'node:stream'
-import { finished } from 'node:stream/promises';
+import path from 'path';
+import { pipeline } from 'stream/promises';
+import { parse as parseCSV } from 'csv-parse';
+import { Writable } from 'stream';
+import { createReadStream } from 'fs';
 import desm from "desm";
-import { parse } from 'csv-parse';
-
 const __dirname = desm(import.meta.url);
-const errors = []
-const parser = parse({
-  bom: true,
-  skipRecordsWithError: true,
-});
-// Create a stream and consume its source
-const sink = new Writable ({objectMode: true, write: (_, __, callback) => callback()})
-const outStream = createReadStream(`${__dirname}/411.csv`).pipe(parser).pipe(sink);
-// Catch records with errors
-parser.on('skip', (e) => {
-  errors.push(e);
-});
-// Wait for stream to be consumed
-await finished(outStream);
-// Catch error from skip event
-assert.deepStrictEqual(errors.map(e => e.message), [
-  'Invalid Record Length: expect 3, got 4 on line 5'
-])
+async function testRecordsSkip() {
+  const errors = [];
+  const records = [];
+
+  const sink = new Writable({
+    objectMode: true,
+    write: (_, __, callback) => {
+      records.push(_);
+      callback();
+    },
+  });
+
+  const csvSource = createReadStream(path.join(__dirname, '411.csv'));
+  const parser = parseCSV({
+    skip_records_with_error: true,
+    bom: true,
+  });
+  parser.on('skip', function (err) {
+    errors.push(err);
+  });
+
+  await pipeline(csvSource, parser, sink);
+
+  console.log({
+    records,
+    errors,
+  });
+}
+
+testRecordsSkip().catch(console.error);