From f1d04d0c880055b7b5b4c6806f8d2bc509c575ec Mon Sep 17 00:00:00 2001 From: Juanjo Diaz Date: Fri, 18 Oct 2019 03:46:19 +0200 Subject: [PATCH] feat: Add support for transforms (#431) BREAKING CHANGE: module no longer takes `unwind`, `unwindBlank`, `flatten` or the `flattenSeparator` options, instead see the new `transforms` option. CLI options are unchanged from the callers side, but use the built in transforms under the hood. * Add support for transforms * Add documentation about transforms --- README.md | 240 ++++++++++----- bin/json2csv.js | 32 +- lib/JSON2CSVBase.js | 118 +------- lib/JSON2CSVParser.js | 4 +- lib/JSON2CSVTransform.js | 1 - lib/json2csv.js | 7 + lib/transforms/flatten.js | 31 ++ lib/transforms/unwind.js | 40 +++ test/CLI.js | 140 ++++----- test/JSON2CSVAsyncParser.js | 228 +++++++------- test/JSON2CSVParser.js | 202 +++++++------ test/JSON2CSVTransform.js | 297 ++++++++++--------- test/fixtures/csv/defaultCustomTransform.csv | 5 + 13 files changed, 739 insertions(+), 606 deletions(-) create mode 100644 lib/transforms/flatten.js create mode 100644 lib/transforms/unwind.js create mode 100644 test/fixtures/csv/defaultCustomTransform.csv diff --git a/README.md b/README.md index 3395d9a3..ade75853 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ See the [CHANGELOG] for details about the latest release. - Fast and lightweight - Scalable to infinitely large datasets (using stream processing) - Support for standard JSON as well as NDJSON -- Advanced data selection (automatic field discovery, underscore-like selectors, custom data getters, default values for missing fields, flattening nested object, unwinding arrays, etc.) +- Advanced data selection (automatic field discovery, underscore-like selectors, custom data getters, default values for missing fields, transforms, etc.) - Highly customizable (supportting custom quotation marks, delimiters, eol values, etc.) - Automatic escaping (preserving new lines, quotes, etc. 
in them) - Optional headers @@ -55,39 +55,37 @@ By default, the above script will get the latest release of json2csv. You can al ```bash Usage: json2csv [options] - Options: - -V, --version output the version number - -c, --config Specify a file with a valid JSON configuration. -i, --input Path and name of the incoming json file. Defaults to stdin. -o, --output [output] Path and name of the resulting csv file. Defaults to stdout. + -c, --config Specify a file with a valid JSON configuration. -n, --ndjson Treat the input as NewLine-Delimited JSON. -s, --no-streaming Process the whole JSON array in memory instead of doing it line by line. -f, --fields List of fields to process. Defaults to field auto-detection. - -u, --unwind Creates multiple rows from a single JSON document similar to MongoDB unwind. - -B, --unwind-blank When unwinding, blank out instead of repeating data. - -F, --flatten Flatten nested objects. - -S, --flatten-separator Flattened keys separator. Defaults to '.'. -v, --default-value [defaultValue] Default value to use for missing fields. - -q, --quote [value] Character(s) to use a quote mark. Defaults to '"'. - -Q, --escaped-quote [value] Character(s) to use as a escaped quote. Defaults to a double `quote`, '""'. - -d, --delimiter [delimiter] Character(s) to use as delimiter. Defaults to ','. - -e, --eol [value] Character(s) to use as End-of-Line for separating rows. Defaults to '\n'. + -q, --quote [quote] Character(s) to use as quote mark. Defaults to '"'. + -Q, --escaped-quote [escapedQuote] Character(s) to use as a escaped quote. Defaults to a double `quote`, '""'. + -d, --delimiter [delimiter] Character(s) to use as delimiter. Defaults to ','. (default: ",") + -e, --eol [eol] Character(s) to use as End-of-Line for separating rows. Defaults to '\n'. (default: "\n") -E, --excel-strings Wraps string data to force Excel to interpret it as string even if it contains a number. -H, --no-header Disable the column name header. 
-a, --include-empty-rows Includes empty rows in the resulting CSV output. -b, --with-bom Includes BOM character at the beginning of the CSV. -p, --pretty Print output as a pretty table. Use only when printing to console. + -u, --unwind Creates multiple rows from a single JSON document similar to MongoDB unwind. + -B, --unwind-blank When unwinding, blank out instead of repeating data. + -F, --flatten Flatten nested objects. + -S, --flatten-separator Flattened keys separator. Defaults to '.'. -h, --help output usage information ``` If no input `-i` is specified the result is expected from to the console standard input. If no output `-o` is specified the result is printed to the console standard output. -If no fields `-f` or `-c` are passed the fields of the first element are used since json2csv CLI process the items one at a time. You can use the `--no-streaming` flag to load the entire JSON in memory and get all the headers. However, keep in mind that this is slower and requires much more memory. +If no fields `-f` or config `-c` are passed the fields of the first element are used since json2csv CLI process the items one at a time. You can use the `--no-streaming` flag to load the entire JSON in memory and get all the headers. However, keep in mind that this is slower and requires much more memory. Use `-p` to show the result as a table in the console. -Any option pass through a config file `-c` will be overriden if a specific flag is passed as well. For example, the fields option of the config will be overriden if the fields flag `-f` is used. +Any option passed through the config file `-c` will be overriden if a specific flag is passed as well. For example, the fields option of the config will be overriden if the fields flag `-f` is used. ### CLI examples @@ -177,10 +175,7 @@ The programatic APIs take a configuration object very equivalent to the CLI opti - `fields` - Array of Objects/Strings. Defaults to toplevel JSON attributes. See example below. 
- `ndjson` - Only effective on the streaming API. Indicates that data coming through the stream is NDJSON. -- `unwind` - Array of Strings, creates multiple rows from a single JSON document similar to MongoDB's $unwind -- `unwindBlank` - Boolean, unwind using blank values instead of repeating data. -- `flatten` - Boolean, flattens nested objects. Defaults to `false`. -- `flattenSeparator` - String, separator to use between nested JSON keys when `flatten` option enabled. Defaults to `.` if not specified. +- `transforms` - Array of transforms to be applied to each data item. A transform is simply a function that receives a data item and returns the transformed item. - `defaultValue` - String, default value to use when missing data. Defaults to `` if not specified. (Overridden by `fields[].default`) - `quote` - String, quote around cell values and column names. Defaults to `"` if not specified. - `escapedQuote` - String, the value to replace escaped quotes in strings. Defaults to 2x`quotes` (for example `""`) if not specified. @@ -341,23 +336,88 @@ json2csv The stream API can also work on object mode. This is useful when you have an input stream in object mode or if you are getting JSON objects one by one and want to convert them to CSV as they come. ```js - const { Transform } = require("json2csv"); - const { Readable } = require('stream'); +const { Transform } = require("json2csv"); +const { Readable } = require('stream'); + +const input = new Readable({ objectMode: true }); +input._read = () => {}; +// myObjectEmitter is just a fake example representing anything that emit objects. 
+myObjectEmitter.on('object', obj => input.push(obj));
+// Pushing a null closes the stream
+myObjectEmitter.end(() => input.push(null));
+
+const output = process.stdout;
+
+const opts = {};
+const transformOpts = { objectMode: true };
+
+const json2csv = new Transform(opts, transformOpts);
+const processor = input.pipe(json2csv).pipe(output);
+```
+
+### Data transforms
+
+json2csv supports data transforms. A transform is simply a function that receives a data item and returns the transformed item.
+
+
+#### Custom transforms
+
+```js
+function (item) {
+  // apply transformations or create new object
+  return transformedItem;
+}
+```
+or using ES6
+```js
+(item) => {
+  // apply transformations or create new object
+  return transformedItem;
+}
+```
+
+For example, let's add a line counter to our CSV, capitalize the car field and change the price to be in Ks (1000s).
+```js
+let counter = 1;
+(item) => ({ counter: counter++, ...item, car: item.car.toUpperCase(), price: item.price / 1000 });
+```
+
+#### Built-in transforms
+
+There are a number of built-in transforms provided by the library.
+
+```js
+const { transforms: { unwind, flatten } } = require('json2csv');
+```
+
+##### Unwind
+
+The unwind transform deconstructs an array field from the input item to output a row for each element. It's similar to MongoDB's $unwind aggregation.
+
+The transform needs to be instantiated and takes 2 arguments:
+- `paths` - Array of String, list the paths to the fields to be unwound. It's mandatory and should not be empty.
+- `blank` - Boolean, unwind using blank values instead of repeating data. Defaults to `false`.
+
+```js
+// Default
+unwind(['fieldToUnwind']);
+
+// Blanking out repeated data
+unwind(['fieldToUnwind'], true);
+```
 
-  const input = new Readable({ objectMode: true });
-  input._read = () => {};
-  // myObjectEmitter is just a fake example representing anything that emit objects.
- myObjectEmitter.on('object', obj => input.push(obj)); - // Pushing a null close the stream - myObjectEmitter.end(()) => input.push(null)); +##### Flatten +Flatten nested javascript objects into a single level object. - const output = process.stdout; +The transform needs to be instantiated and takes 1 argument: +- `separator` - String, separator to use between nested JSON keys when flattening a field. Defaults to `.`. - const opts = {}; - const transformOpts = { objectMode: true }; +```js +// Default +flatten(); - const json2csv = new Transform(opts, transformOpts); - const processor = input.pipe(json2csv).pipe(output); +// Custom separator '__' +flatten('_'); ``` ### Javascript module examples @@ -402,7 +462,6 @@ The stream API can also work on object mode. This is useful when you have an inp ```js const { Parser } = require('json2csv'); -const fields = ['car', 'price', 'color']; const myCars = [ { "car": "Audi", @@ -419,7 +478,7 @@ const myCars = [ } ]; -const json2csvParser = new Parser({ fields }); +const json2csvParser = new Parser(); const csv = json2csvParser.parse(myCars); console.log(csv); @@ -428,7 +487,7 @@ console.log(csv); will output to console ``` -car, price, color +"car", "price", "color" "Audi", 40000, "blue" "BMW", 35000, "black" "Porsche", 60000, "green" @@ -436,7 +495,7 @@ car, price, color #### Example 2 -Similarly to [mongoexport](http://www.mongodb.org/display/DOCS/mongoexport) you can choose which fields to export. +You can choose which fields to include in the CSV. 
```js const { Parser } = require('json2csv'); @@ -448,10 +507,10 @@ const csv = json2csvParser.parse(myCars); console.log(csv); ``` -Results in +will output to console ``` -car, color +"car", "color" "Audi", "blue" "BMW", "black" "Porsche", "green" @@ -478,6 +537,15 @@ const csv = json2csvParser.parse(myCars); console.log(csv); ``` +will output to console + +``` +"Car Name", "Price USD" +"Audi", 40000 +"BMW", 35000 +"Porsche", 60000 +``` + #### Example 4 You can also specify nested properties using dot notation. @@ -485,23 +553,24 @@ You can also specify nested properties using dot notation. ```js const { Parser } = require('json2csv'); -const fields = ['car.make', 'car.model', 'price', 'color']; const myCars = [ { - "car": {"make": "Audi", "model": "A3"}, + "car": { "make": "Audi", "model": "A3" }, "price": 40000, "color": "blue" }, { - "car": {"make": "BMW", "model": "F20"}, + "car": { "make": "BMW", "model": "F20" }, "price": 35000, "color": "black" }, { - "car": {"make": "Porsche", "model": "9PA AF1"}, + "car": { "make": "Porsche", "model": "9PA AF1" }, "price": 60000, "color": "green" } ]; +const fields = ['car.make', 'car.model', 'price', 'color']; + const json2csvParser = new Parser({ fields }); const csv = json2csvParser.parse(myCars); @@ -511,7 +580,7 @@ console.log(csv); will output to console ``` -car.make, car.model, price, color +"car.make", "car.model", "price", "color" "Audi", "A3", 40000, "blue" "BMW", "F20", 35000, "black" "Porsche", "9PA AF1", 60000, "green" @@ -524,25 +593,23 @@ Use a custom delimiter to create tsv files using the delimiter option: ```js const { Parser } = require('json2csv'); -const fields = ['car', 'price', 'color']; - -const json2csvParser = new Parser({ fields, delimiter: '\t' }); +const json2csvParser = new Parser({ delimiter: '\t' }); const tsv = json2csvParser.parse(myCars); console.log(tsv); ``` -Will output: +will output to console ``` -car price color +"car" "price" "color" "Audi" 10000 "blue" "BMW" 15000 "red" 
"Mercedes" 20000 "yellow" "Porsche" 30000 "green" ``` -If no delimiter is specified, the default `,` is used +If no delimiter is specified, the default `,` is used. #### Example 6 @@ -551,37 +618,28 @@ You can choose custom quotation marks. ```js const { Parser } = require('json2csv'); -const fields = [{ - label: 'Car Name', - value: 'car' -},{ - label: 'Price USD', - value: 'price' -}]; - -const json2csvParser = new Parser({ fields, quote: '' }); +const json2csvParser = new Parser({ quote: '' }); const csv = json2csvParser.parse(myCars); console.log(csv); ``` -Results in +will output to console ``` -Car Name, Price USD -Audi, 10000 -BMW, 15000 -Porsche, 30000 +car, price, color +Audi, 40000, blue +BMW", 35000, black +Porsche", 60000, green ``` #### Example 7 -You can unwind arrays similar to MongoDB's $unwind operation using the `unwind` option. +You can unwind arrays similar to MongoDB's $unwind operation using the `unwind` transform. ```js -const { Parser } = require('json2csv'); +const { Parser, transforms: { unwind } } = require('json2csv'); -const fields = ['carModel', 'price', 'colors']; const myCars = [ { "carModel": "Audi", @@ -602,7 +660,10 @@ const myCars = [ } ]; -const json2csvParser = new Parser({ fields, unwind: 'colors' }); +const fields = ['carModel', 'price', 'colors']; +const transforms = [unwind('colors')]; + +const json2csvParser = new Parser({ fields, transforms }); const csv = json2csvParser.parse(myCars); console.log(csv); @@ -630,7 +691,6 @@ You can also unwind arrays multiple times or with nested objects. 
```js const { Parser } = require('json2csv'); -const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; const myCars = [ { "carModel": "BMW", @@ -675,7 +735,9 @@ const myCars = [ } ]; -const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'] }); +const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; +const transforms = [unwind(['items', 'items.items'])]; +const json2csvParser = new Parser({ fields, transforms }); const csv = json2csvParser.parse(myCars); console.log(csv); @@ -700,7 +762,6 @@ You can also unwind arrays blanking the repeated fields. ```js const { Parser } = require('json2csv'); -const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; const myCars = [ { "carModel": "BMW", @@ -745,7 +806,10 @@ const myCars = [ } ]; -const json2csvParser = new Parser({ fields, unwind: ['items', 'items.items'], unwindBlank: true }); +const fields = ['carModel', 'price', 'items.name', 'items.color', 'items.items.position', 'items.items.color']; +const transforms = [unwind(['items', 'items.items'], true)]; + +const json2csvParser = new Parser({ fields, transforms }); const csv = json2csvParser.parse(myCars); console.log(csv); @@ -763,7 +827,9 @@ will output to console ,,,,"right","black" ``` -### Migrating from 3.X to 4.X +### Migrations + +#### Migrating from 3.X to 4.X What in 3.X used to be ```js @@ -771,10 +837,10 @@ const json2csv = require('json2csv'); const csv = json2csv({ data: myData, fields: myFields, unwindPath: paths, ... }); ``` -can be replaced by +should be replaced by ```js -const Json2csvParser = require('json2csv').Parser; -const json2csvParser = new Json2csvParser({ fields: myFields, unwind: paths, ... }); +const { Parser } = require('json2csv'); +const json2csvParser = new Parser({ fields: myFields, unwind: paths, ... 
});
 const csv = json2csvParser.parse(myData);
 ```
 
@@ -787,6 +853,32 @@ const csv = json2csv.parse(myData, { fields: myFields, unwind: paths, ... });
 
 Please note that many of the configuration parameters have been slightly renamed. Please check one by one that all your parameters are correct. You can se the documentation for json2csv 3.11.5 [here](https://github.com/zemirco/json2csv/blob/v3.11.5/README.md).
 
+#### Migrating from 4.X to 5.X
+
+In the CLI, the config file option, `-c`, used to be a list of fields and now it's expected to be a full configuration object.
+
+The `stringify` option has been removed.
+
+`doubleQuote` has been renamed to `escapedQuote`.
+
+The `unwind`- and `flatten`-related options have been moved to their own transforms.
+
+What used to be
+```js
+const { Parser } = require('json2csv');
+const json2csvParser = new Parser({ unwind: paths, unwindBlank: true, flatten: true, flattenSeparator: '__' });
+const csv = json2csvParser.parse(myData);
+```
+
+should be replaced by
+```js
+const { Parser, transforms: { unwind, flatten } } = require('json2csv');
+const json2csvParser = new Parser({ transforms: [unwind(paths, true), flatten('__')] });
+const csv = json2csvParser.parse(myData);
+```
+
+You can see the documentation for json2csv v4.X.X [here](https://github.com/zemirco/json2csv/blob/v4/README.md).
+ ## Known Gotchas ### Excel support diff --git a/bin/json2csv.js b/bin/json2csv.js index 0b2b992d..2a1158c6 100755 --- a/bin/json2csv.js +++ b/bin/json2csv.js @@ -17,6 +17,7 @@ const writeFile = promisify(writeFileOrig); const isAbsolutePath = promisify(isAbsolute); const joinPath = promisify(join); +const { unwind, flatten } = json2csv.transforms; const JSON2CSVParser = json2csv.Parser; const Json2csvTransform = json2csv.Transform; @@ -28,20 +29,21 @@ program .option('-n, --ndjson', 'Treat the input as NewLine-Delimited JSON.') .option('-s, --no-streaming', 'Process the whole JSON array in memory instead of doing it line by line.') .option('-f, --fields ', 'List of fields to process. Defaults to field auto-detection.') - .option('-u, --unwind ', 'Creates multiple rows from a single JSON document similar to MongoDB unwind.') - .option('-B, --unwind-blank', 'When unwinding, blank out instead of repeating data.') - .option('-F, --flatten', 'Flatten nested objects.') - .option('-S, --flatten-separator ', 'Flattened keys separator. Defaults to \'.\'.') .option('-v, --default-value [defaultValue]', 'Default value to use for missing fields.') .option('-q, --quote [quote]', 'Character(s) to use as quote mark. Defaults to \'"\'.') .option('-Q, --escaped-quote [escapedQuote]', 'Character(s) to use as a escaped quote. Defaults to a double `quote`, \'""\'.') - .option('-d, --delimiter [delimiter]', 'Character(s) to use as delimiter. Defaults to \',\'.') - .option('-e, --eol [eol]', 'Character(s) to use as End-of-Line for separating rows. Defaults to \'\\n\'.') + .option('-d, --delimiter [delimiter]', 'Character(s) to use as delimiter. Defaults to \',\'.', ',') + .option('-e, --eol [eol]', 'Character(s) to use as End-of-Line for separating rows. 
Defaults to \'\\n\'.', os.EOL) .option('-E, --excel-strings','Wraps string data to force Excel to interpret it as string even if it contains a number.') .option('-H, --no-header', 'Disable the column name header.') .option('-a, --include-empty-rows', 'Includes empty rows in the resulting CSV output.') .option('-b, --with-bom', 'Includes BOM character at the beginning of the CSV.') .option('-p, --pretty', 'Print output as a pretty table. Use only when printing to console.') + // Built-in transforms + .option('-u, --unwind ', 'Creates multiple rows from a single JSON document similar to MongoDB unwind.') + .option('-B, --unwind-blank', 'When unwinding, blank out instead of repeating data.') + .option('-F, --flatten', 'Flatten nested objects.') + .option('-S, --flatten-separator ', 'Flattened keys separator. Defaults to \'.\'.') .parse(process.argv); function makePathAbsolute(filePath) { @@ -54,11 +56,6 @@ program.input = makePathAbsolute(program.input); program.output = makePathAbsolute(program.output); program.config = makePathAbsolute(program.config); -if (program.fields) program.fields = program.fields.split(','); -if (program.unwind) program.unwind = program.unwind.split(','); -program.delimiter = program.delimiter || ','; -program.eol = program.eol || os.EOL; - // don't fail if piped to e.g. head /* istanbul ignore next */ process.stdout.on('error', (error) => { @@ -137,13 +134,16 @@ async function processStream(config, opts) { (async (program) => { try { const config = Object.assign({}, program.config ? require(program.config) : {}, program); + + const transforms = []; + if (config.unwind) transforms.push(unwind(config.unwind.split(','), config.unwindBlank || false)); + if (config.flatten) transforms.push(flatten(config.flattenSeparator || '.')); const opts = { - fields: config.fields, - unwind: config.unwind, - unwindBlank: config.unwindBlank, - flatten: config.flatten, - flattenSeparator: config.flattenSeparator, + transforms, + fields: config.fields + ? 
(Array.isArray(config.fields) ? config.fields : config.fields.split(',')) + : config.fields, defaultValue: config.defaultValue, quote: config.quote, escapedQuote: config.escapedQuote, diff --git a/lib/JSON2CSVBase.js b/lib/JSON2CSVBase.js index 697dd5a0..466d9595 100644 --- a/lib/JSON2CSVBase.js +++ b/lib/JSON2CSVBase.js @@ -2,12 +2,11 @@ const os = require('os'); const lodashGet = require('lodash.get'); -const { getProp, setProp, fastJoin, flattenReducer } = require('./utils'); +const { getProp, fastJoin, flattenReducer } = require('./utils'); class JSON2CSVBase { constructor(opts) { this.opts = this.preprocessOpts(opts); - this.preprocessRow = this.memoizePreprocessRow(); } /** @@ -18,14 +17,13 @@ class JSON2CSVBase { */ preprocessOpts(opts) { const processedOpts = Object.assign({}, opts); - processedOpts.unwind = !Array.isArray(processedOpts.unwind) - ? (processedOpts.unwind ? [processedOpts.unwind] : []) - : processedOpts.unwind + processedOpts.transforms = !Array.isArray(processedOpts.transforms) + ? (processedOpts.transforms ? [processedOpts.transforms] : []) + : processedOpts.transforms processedOpts.delimiter = processedOpts.delimiter || ','; - processedOpts.flattenSeparator = processedOpts.flattenSeparator || '.'; processedOpts.eol = processedOpts.eol || os.EOL; processedOpts.quote = typeof processedOpts.quote === 'string' - ? opts.quote + ? processedOpts.quote : '"'; processedOpts.escapedQuote = typeof processedOpts.escapedQuote === 'string' ? 
processedOpts.escapedQuote @@ -100,39 +98,16 @@ class JSON2CSVBase { ); } - memoizePreprocessRow() { - if (this.opts.unwind && this.opts.unwind.length) { - if (this.opts.flatten) { - return function (row) { - return this.unwindData(row, this.opts.unwind) - .map(row => this.flatten(row, this.opts.flattenSeparator)); - }; - } - - return function (row) { - return this.unwindData(row, this.opts.unwind); - }; - } - - if (this.opts.flatten) { - return function (row) { - return [this.flatten(row, this.opts.flattenSeparator)]; - }; - } - - return function (row) { - return [row]; - }; - } - /** - * Preprocess each object according to the give opts (unwind, flatten, etc.). - * The actual body of the function is dynamically set on the constructor by the - * `memoizePreprocessRow` method after parsing the options. - * + * Preprocess each object according to the given transforms (unwind, flatten, etc.). * @param {Object} row JSON object to be converted in a CSV row */ - preprocessRow() {} + preprocessRow(row) { + return this.opts.transforms.reduce((rows, transform) => + rows.map(row => transform(row)).reduce(flattenReducer, []), + [row] + ); + } /** * Create the content of a specific CSV row @@ -206,75 +181,6 @@ class JSON2CSVBase { return value; } - - /** - * Performs the flattening of a data row recursively - * - * @param {Object} dataRow Original JSON object - * @param {String} separator Separator to be used as the flattened field name - * @returns {Object} Flattened object - */ - flatten(dataRow, separator) { - function step (obj, flatDataRow, currentPath) { - Object.keys(obj).forEach((key) => { - const value = obj[key]; - - const newPath = currentPath - ? 
`${currentPath}${separator}${key}` - : key; - - if (typeof value !== 'object' - || value === null - || Array.isArray(value) - || Object.prototype.toString.call(value.toJSON) === '[object Function]' - || !Object.keys(value).length) { - flatDataRow[newPath] = value; - return; - } - - step(value, flatDataRow, newPath); - }); - - return flatDataRow; - } - - return step(dataRow, {}); - } - - /** - * Performs the unwind recursively in specified sequence - * - * @param {Object} dataRow Original JSON object - * @param {String[]} unwindPaths The paths as strings to be used to deconstruct the array - * @returns {Array} Array of objects containing all rows after unwind of chosen paths - */ - unwindData(dataRow, unwindPaths) { - const unwind = (rows, unwindPath) => { - return rows - .map(row => { - const unwindArray = lodashGet(row, unwindPath); - - if (!Array.isArray(unwindArray)) { - return row; - } - - if (!unwindArray.length) { - return setProp(row, unwindPath, undefined); - } - - return unwindArray.map((unwindRow, index) => { - const clonedRow = (this.opts.unwindBlank && index > 0) - ? 
{} - : row; - - return setProp(clonedRow, unwindPath, unwindRow); - }); - }) - .reduce(flattenReducer, []); - }; - - return unwindPaths.reduce(unwind, [dataRow]); - } } module.exports = JSON2CSVBase; diff --git a/lib/JSON2CSVParser.js b/lib/JSON2CSVParser.js index 6d284be9..9b3b0c0f 100644 --- a/lib/JSON2CSVParser.js +++ b/lib/JSON2CSVParser.js @@ -57,9 +57,7 @@ class JSON2CSVParser extends JSON2CSVBase { throw new Error('Data should not be empty or the "fields" option should be included'); } - if ((!this.opts.unwind || !this.opts.unwind.length) && !this.opts.flatten) { - return processedData; - } + if (this.opts.transforms.length === 0) return processedData; return processedData .map(row => this.preprocessRow(row)) diff --git a/lib/JSON2CSVTransform.js b/lib/JSON2CSVTransform.js index 75ac1564..f7a52f24 100644 --- a/lib/JSON2CSVTransform.js +++ b/lib/JSON2CSVTransform.js @@ -13,7 +13,6 @@ class JSON2CSVTransform extends Transform { Object.getOwnPropertyNames(JSON2CSVBase.prototype) .forEach(key => (this[key] = JSON2CSVBase.prototype[key])); this.opts = this.preprocessOpts(opts); - this.preprocessRow = this.memoizePreprocessRow(); this._data = ''; this._hasWritten = false; diff --git a/lib/json2csv.js b/lib/json2csv.js index 4d57b4ce..7fe5ba4f 100644 --- a/lib/json2csv.js +++ b/lib/json2csv.js @@ -4,6 +4,8 @@ const { Readable } = require('stream'); const JSON2CSVParser = require('./JSON2CSVParser'); const JSON2CSVAsyncParser = require('./JSON2CSVAsyncParser'); const JSON2CSVTransform = require('./JSON2CSVTransform'); +const flatten = require('./transforms/flatten'); +const unwind = require('./transforms/unwind'); module.exports.Parser = JSON2CSVParser; module.exports.AsyncParser = JSON2CSVAsyncParser; @@ -35,3 +37,8 @@ module.exports.parseAsync = (data, opts, transformOpts) => { return Promise.reject(err); } }; + +module.exports.transforms = { + flatten, + unwind, +}; \ No newline at end of file diff --git a/lib/transforms/flatten.js b/lib/transforms/flatten.js new 
file mode 100644 index 00000000..11a3cfc2 --- /dev/null +++ b/lib/transforms/flatten.js @@ -0,0 +1,31 @@ +/** + * Performs the flattening of a data row recursively + * + * @param {String} separator Separator to be used as the flattened field name + * @returns {Object => Object} Flattened object + */ +function flatten(separator = '.') { + function step (obj, flatDataRow, currentPath) { + Object.keys(obj).forEach((key) => { + const newPath = currentPath ? `${currentPath}${separator}${key}` : key; + const value = obj[key]; + + if (typeof value !== 'object' + || value === null + || Array.isArray(value) + || Object.prototype.toString.call(value.toJSON) === '[object Function]' + || !Object.keys(value).length) { + flatDataRow[newPath] = value; + return; + } + + step(value, flatDataRow, newPath); + }); + + return flatDataRow; + } + + return dataRow => step(dataRow, {}); +} + +module.exports = flatten; diff --git a/lib/transforms/unwind.js b/lib/transforms/unwind.js new file mode 100644 index 00000000..fa70f021 --- /dev/null +++ b/lib/transforms/unwind.js @@ -0,0 +1,40 @@ + +const lodashGet = require('lodash.get'); +const { setProp, flattenReducer } = require('../utils'); + +/** + * Performs the unwind recursively in specified sequence + * + * @param {String[]} unwindPaths The paths as strings to be used to deconstruct the array + * @returns {Object => Array} Array of objects containing all rows after unwind of chosen paths +*/ +function unwind(paths, blankOut = false) { + function unwindReducer(rows, unwindPath) { + return rows + .map(row => { + const unwindArray = lodashGet(row, unwindPath); + + if (!Array.isArray(unwindArray)) { + return row; + } + + if (!unwindArray.length) { + return setProp(row, unwindPath, undefined); + } + + return unwindArray.map((unwindRow, index) => { + const clonedRow = (blankOut && index > 0) + ? {} + : row; + + return setProp(clonedRow, unwindPath, unwindRow); + }); + }) + .reduce(flattenReducer, []); + } + + paths = Array.isArray(paths) ? 
paths : (paths ? [paths] : []); + return dataRow => paths.reduce(unwindReducer, [dataRow]); +} + +module.exports = unwind; \ No newline at end of file diff --git a/test/CLI.js b/test/CLI.js index 822154ae..fb7d8388 100644 --- a/test/CLI.js +++ b/test/CLI.js @@ -273,76 +273,6 @@ module.exports = (testRunner, jsonFixtures, csvFixtures) => { }); }); - // Preprocessing - - testRunner.add('should support unwinding an object into multiple rows', (t) => { - const opts = '--unwind colors'; - - exec(`${cli} -i "${getFixturePath('/json/unwind.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.unwind); - t.end(); - }); - }); - - testRunner.add('should support multi-level unwind', (t) => { - const opts = '--fields carModel,price,extras.items.name,extras.items.color,extras.items.items.position,extras.items.items.color' - + ' --unwind extras.items,extras.items.items'; - - exec(`${cli} -i "${getFixturePath('/json/unwind2.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.unwind2); - t.end(); - }); - }); - - testRunner.add('hould unwind and blank out repeated data', (t) => { - const opts = '--fields carModel,price,extras.items.name,extras.items.color,extras.items.items.position,extras.items.items.color' - + ' --unwind extras.items,extras.items.items --unwind-blank'; - - exec(`${cli} -i "${getFixturePath('/json/unwind2.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.unwind2Blank); - t.end(); - }); - }); - - testRunner.add('should support flattening deep JSON', (t) => { - const opts = '--flatten'; - - exec(`${cli} -i "${getFixturePath('/json/deepJSON.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.flattenedDeepJSON); - t.end(); - }); - }); - - testRunner.add('should support custom flatten separator', (t) => { - const opts = 
'--flatten --flatten-separator __'; - - exec(`${cli} -i "${getFixturePath('/json/deepJSON.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); - t.end(); - }); - }); - - testRunner.add('should unwind and flatten an object in the right order', (t) => { - const opts = '--unwind items --flatten'; - - exec(`${cli} -i "${getFixturePath('/json/unwindAndFlatten.json')}" ${opts}`, (err, stdout, stderr) => { - t.notOk(stderr); - const csv = stdout; - t.equal(csv, csvFixtures.unwindAndFlatten); - t.end(); - }); - }); - // Default value testRunner.add('should output the default value as set in \'defaultValue\'', (t) => { @@ -808,4 +738,74 @@ module.exports = (testRunner, jsonFixtures, csvFixtures) => { t.end(); }); }); + + // Preprocessing + + testRunner.add('should support unwinding an object into multiple rows using the unwind transform', (t) => { + const opts = '--unwind colors'; + + exec(`${cli} -i "${getFixturePath('/json/unwind.json')}" ${opts}`, (err, stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.unwind); + t.end(); + }); + }); + + testRunner.add('should support multi-level unwind using the unwind transform', (t) => { + const opts = '--fields carModel,price,extras.items.name,extras.items.color,extras.items.items.position,extras.items.items.color' + + ' --unwind extras.items,extras.items.items'; + + exec(`${cli} -i "${getFixturePath('/json/unwind2.json')}" ${opts}`, (err, stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.unwind2); + t.end(); + }); + }); + + testRunner.add('hould unwind and blank out repeated data', (t) => { + const opts = '--fields carModel,price,extras.items.name,extras.items.color,extras.items.items.position,extras.items.items.color' + + ' --unwind extras.items,extras.items.items --unwind-blank'; + + exec(`${cli} -i "${getFixturePath('/json/unwind2.json')}" ${opts}`, (err, 
stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.unwind2Blank); + t.end(); + }); + }); + + testRunner.add('should support flattening deep JSON using the flatten transform', (t) => { + const opts = '--flatten'; + + exec(`${cli} -i "${getFixturePath('/json/deepJSON.json')}" ${opts}`, (err, stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.flattenedDeepJSON); + t.end(); + }); + }); + + testRunner.add('should support custom flatten separator using the flatten transform', (t) => { + const opts = '--flatten --flatten-separator __'; + + exec(`${cli} -i "${getFixturePath('/json/deepJSON.json')}" ${opts}`, (err, stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); + t.end(); + }); + }); + + testRunner.add('should support multiple transforms and honor the order in which they are declared', (t) => { + const opts = '--unwind items --flatten'; + + exec(`${cli} -i "${getFixturePath('/json/unwindAndFlatten.json')}" ${opts}`, (err, stdout, stderr) => { + t.notOk(stderr); + const csv = stdout; + t.equal(csv, csvFixtures.unwindAndFlatten); + t.end(); + }); + }); }; diff --git a/test/JSON2CSVAsyncParser.js b/test/JSON2CSVAsyncParser.js index 2d489506..4ce4e71b 100644 --- a/test/JSON2CSVAsyncParser.js +++ b/test/JSON2CSVAsyncParser.js @@ -1,7 +1,7 @@ 'use strict'; const { Readable, Transform, Writable } = require('stream'); -const { AsyncParser, parseAsync } = require('../lib/json2csv'); +const { AsyncParser, parseAsync, transforms: { flatten, unwind } } = require('../lib/json2csv'); module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) => { testRunner.add('should should error async if invalid opts are passed using parseAsync method', async (t) => { @@ -506,110 +506,6 @@ module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) = t.end(); }); - // Preprocessing - - testRunner.add('should 
support unwinding an object into multiple rows', async (t) => { - const opts = { - fields: ['carModel', 'price', 'colors'], - unwind: 'colors' - }; - const parser = new AsyncParser(opts); - - try { - const csv = await parser.fromInput(jsonFixtures.unwind()).promise(); - t.equal(csv, csvFixtures.unwind); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - - testRunner.add('should support multi-level unwind', async (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'] - }; - const parser = new AsyncParser(opts); - - try { - const csv = await parser.fromInput(jsonFixtures.unwind2()).promise(); - t.equal(csv, csvFixtures.unwind2); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - - testRunner.add('should unwind and blank out repeated data', async (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'], - unwindBlank: true - }; - const parser = new AsyncParser(opts); - - try { - const csv = await parser.fromInput(jsonFixtures.unwind2()).promise(); - t.equal(csv, csvFixtures.unwind2Blank); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - - testRunner.add('should support flattening deep JSON', async (t) => { - const opts = { - flatten: true - }; - const parser = new AsyncParser(opts); - - try { - const csv = await parser.fromInput(jsonFixtures.deepJSON()).promise(); - t.equal(csv, csvFixtures.flattenedDeepJSON); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - - testRunner.add('should support custom flatten separator', async (t) => { - const opts = { - flatten: true, - flattenSeparator: '__', - }; - const parser = new AsyncParser(opts); - - try { - const csv = await 
parser.fromInput(jsonFixtures.deepJSON()).promise(); - t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - - testRunner.add('should unwind and flatten an object in the right order', async (t) => { - const opts = { - unwind: ['items'], - flatten: true - }; - const parser = new AsyncParser(opts); - - try { - const csv = await parser.fromInput(jsonFixtures.unwindAndFlatten()).promise(); - t.equal(csv, csvFixtures.unwindAndFlatten); - } catch(err) { - t.fail(err.message); - } - - t.end(); - }); - // Default value testRunner.add('should output the default value as set in \'defaultValue\'', async (t) => { @@ -1230,4 +1126,126 @@ module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) = t.end(); }); + // Transforms + + testRunner.add('should support unwinding an object into multiple rows using the unwind transform', async (t) => { + const opts = { + fields: ['carModel', 'price', 'colors'], + transforms: [unwind(['colors'])], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.unwind()).promise(); + t.equal(csv, csvFixtures.unwind); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support multi-level unwind using the unwind transform', async (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], + transforms: [unwind(['extras.items', 'extras.items.items'])], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.unwind2()).promise(); + t.equal(csv, csvFixtures.unwind2); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support unwind and blank out repeated data using the unwind transform', async (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 
'extras.items.items.position', 'extras.items.items.color'], + transforms: [unwind(['extras.items', 'extras.items.items'], true)], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.unwind2()).promise(); + t.equal(csv, csvFixtures.unwind2Blank); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support flattening deep JSON using the flatten transform', async (t) => { + const opts = { + transforms: [flatten()], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.deepJSON()).promise(); + t.equal(csv, csvFixtures.flattenedDeepJSON); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support custom flatten separator using the flatten transform', async (t) => { + const opts = { + transforms: [flatten('__')], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.deepJSON()).promise(); + t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support multiple transforms and honor the order in which they are declared', async (t) => { + const opts = { + transforms: [unwind(['items']), flatten()], + }; + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.unwindAndFlatten()).promise(); + t.equal(csv, csvFixtures.unwindAndFlatten); + } catch(err) { + t.fail(err.message); + } + + t.end(); + }); + + testRunner.add('should support custom transforms', async (t) => { + const opts = { + transforms: [row => ({ + model: row.carModel, + price: row.price / 1000, + color: row.color, + transmission: row.transmission || 'automatic', + })], + }; + + const parser = new AsyncParser(opts); + + try { + const csv = await parser.fromInput(jsonFixtures.default()).promise(); + t.equal(csv, csvFixtures.defaultCustomTransform); + 
} catch(err) { + t.fail(err.message); + } + + t.end(); + }); }; diff --git a/test/JSON2CSVParser.js b/test/JSON2CSVParser.js index 0024fba3..8040cd5f 100644 --- a/test/JSON2CSVParser.js +++ b/test/JSON2CSVParser.js @@ -1,6 +1,6 @@ 'use strict'; -const { parse, Parser: Json2csvParser } = require('../lib/json2csv'); +const { parse, Parser: Json2csvParser, transforms: { flatten, unwind } } = require('../lib/json2csv'); module.exports = (testRunner, jsonFixtures, csvFixtures) => { testRunner.add('should parse json to csv, infer the fields automatically and not modify the opts passed using parse method', (t) => { @@ -298,99 +298,6 @@ module.exports = (testRunner, jsonFixtures, csvFixtures) => { t.end(); }); - // Preprocessing - - testRunner.add('should support unwinding an object into multiple rows', (t) => { - const opts = { - fields: ['carModel', 'price', 'colors'], - unwind: 'colors' - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.unwind); - - t.equal(csv, csvFixtures.unwind); - t.end(); - }); - - testRunner.add('should support multi-level unwind', (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'] - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.unwind2); - - t.equal(csv, csvFixtures.unwind2); - t.end(); - }); - - testRunner.add('should unwind and blank out repeated data', (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'], - unwindBlank: true - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.unwind2); - - t.equal(csv, csvFixtures.unwind2Blank); - t.end(); - }); - - - testRunner.add('should support flattening deep JSON', (t) => { - 
const opts = { - flatten: true - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.deepJSON); - - t.equal(csv, csvFixtures.flattenedDeepJSON); - t.end(); - }); - - testRunner.add('should support flattening JSON with toJSON', (t) => { - const opts = { - flatten: true - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.flattenToJSON); - - t.equal(csv, csvFixtures.flattenToJSON); - t.end(); - }); - - testRunner.add('should support custom flatten separator', (t) => { - const opts = { - flatten: true, - flattenSeparator: '__', - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.deepJSON); - - t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); - t.end(); - }); - - testRunner.add('should unwind and flatten an object in the right order', (t) => { - const opts = { - unwind: ['items'], - flatten: true - }; - - const parser = new Json2csvParser(opts); - const csv = parser.parse(jsonFixtures.unwindAndFlatten); - - t.equal(csv, csvFixtures.unwindAndFlatten); - t.end(); - }); - // Default value testRunner.add('should output the default value as set in \'defaultValue\'', (t) => { @@ -759,4 +666,111 @@ module.exports = (testRunner, jsonFixtures, csvFixtures) => { t.equal(csv.length, csvFixtures.withBOM.length); t.end(); }); + + // Transforms + + testRunner.add('should support unwinding an object into multiple rows using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'colors'], + transforms: [unwind(['colors'])], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.unwind); + + t.equal(csv, csvFixtures.unwind); + t.end(); + }); + + testRunner.add('should support multi-level unwind using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], + transforms: 
[unwind(['extras.items', 'extras.items.items'])], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.unwind2); + + t.equal(csv, csvFixtures.unwind2); + t.end(); + }); + + testRunner.add('should support unwind and blank out repeated data using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], + transforms: [unwind(['extras.items', 'extras.items.items'], true)], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.unwind2); + + t.equal(csv, csvFixtures.unwind2Blank); + t.end(); + }); + + + testRunner.add('should support flattening deep JSON using the flatten transform', (t) => { + const opts = { + transforms: [flatten()], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.deepJSON); + + t.equal(csv, csvFixtures.flattenedDeepJSON); + t.end(); + }); + + testRunner.add('should support flattening JSON with toJSON using the flatten transform', (t) => { + const opts = { + transforms: [flatten()], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.flattenToJSON); + + t.equal(csv, csvFixtures.flattenToJSON); + t.end(); + }); + + testRunner.add('should support custom flatten separator using the flatten transform', (t) => { + const opts = { + transforms: [flatten('__')], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.deepJSON); + + t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); + t.end(); + }); + + testRunner.add('should support multiple transforms and honor the order in which they are declared', (t) => { + const opts = { + transforms: [unwind('items'), flatten()], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.unwindAndFlatten); + + t.equal(csv, csvFixtures.unwindAndFlatten); + t.end(); + }); + + 
testRunner.add('should support custom transforms', (t) => { + const opts = { + transforms: [row => ({ + model: row.carModel, + price: row.price / 1000, + color: row.color, + transmission: row.transmission || 'automatic', + })], + }; + + const parser = new Json2csvParser(opts); + const csv = parser.parse(jsonFixtures.default); + + t.equal(csv, csvFixtures.defaultCustomTransform); + t.end(); + }); }; diff --git a/test/JSON2CSVTransform.js b/test/JSON2CSVTransform.js index e1512e5a..aad092f8 100644 --- a/test/JSON2CSVTransform.js +++ b/test/JSON2CSVTransform.js @@ -1,7 +1,7 @@ 'use strict'; const { Readable } = require('stream'); -const { Transform: Json2csvTransform } = require('../lib/json2csv'); +const { Transform: Json2csvTransform, transforms: { flatten, unwind } } = require('../lib/json2csv'); module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) => { testRunner.add('should handle object mode', (t) => { @@ -505,142 +505,6 @@ module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) = }); }); - // Preprocessing - - testRunner.add('should support unwinding an object into multiple rows', (t) => { - const opts = { - fields: ['carModel', 'price', 'colors'], - unwind: 'colors' - }; - - const transform = new Json2csvTransform(opts); - const processor = jsonFixtures.unwind().pipe(transform); - - let csv = ''; - processor - .on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.unwind); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - - testRunner.add('should support multi-level unwind', (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'] - }; - - const transform = new Json2csvTransform(opts); - const processor = jsonFixtures.unwind2().pipe(transform); - - let csv = ''; - processor - 
.on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.unwind2); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - - - - testRunner.add('should unwind and blank out repeated data', (t) => { - const opts = { - fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], - unwind: ['extras.items', 'extras.items.items'], - unwindBlank: true - }; - - const transform = new Json2csvTransform(opts); - const processor = jsonFixtures.unwind2().pipe(transform); - - let csv = ''; - processor - .on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.unwind2Blank); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - - testRunner.add('should support flattening deep JSON', (t) => { - const opts = { - flatten: true - }; - - const transform = new Json2csvTransform(opts); - const processor = jsonFixtures.deepJSON().pipe(transform); - - let csv = ''; - processor - .on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.flattenedDeepJSON); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - - testRunner.add('should support custom flatten separator', (t) => { - const opts = { - flatten: true, - flattenSeparator: '__', - }; - - const transform = new Json2csvTransform(opts); - const processor = jsonFixtures.deepJSON().pipe(transform); - - let csv = ''; - processor - .on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - - testRunner.add('should unwind and flatten an object in the right order', (t) => { - const opts = { - unwind: ['items'], - flatten: true - }; - - const transform = new Json2csvTransform(opts); - const processor = 
jsonFixtures.unwindAndFlatten().pipe(transform); - - let csv = ''; - processor - .on('data', chunk => (csv += chunk.toString())) - .on('end', () => { - t.equal(csv, csvFixtures.unwindAndFlatten); - t.end(); - }) - .on('error', err => { - t.fail(err.message); - t.end(); - }); - }); - // Default value testRunner.add('should output the default value as set in \'defaultValue\'', (t) => { @@ -1256,4 +1120,163 @@ module.exports = (testRunner, jsonFixtures, csvFixtures, inMemoryJsonFixtures) = t.end(); }); }); + + // Transform + + testRunner.add('should support unwinding an object into multiple rows using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'colors'], + transforms: [unwind(['colors'])], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.unwind().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.unwind); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + testRunner.add('should support multi-level unwind using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], + transforms: [unwind(['extras.items', 'extras.items.items'])], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.unwind2().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.unwind2); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + + + testRunner.add('should support unwind and blank out repeated data using the unwind transform', (t) => { + const opts = { + fields: ['carModel', 'price', 'extras.items.name', 'extras.items.color', 'extras.items.items.position', 'extras.items.items.color'], + transforms: 
[unwind(['extras.items', 'extras.items.items'], true)], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.unwind2().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.unwind2Blank); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + testRunner.add('should support flattening deep JSON using the flatten transform', (t) => { + const opts = { + transforms: [flatten()], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.deepJSON().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.flattenedDeepJSON); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + testRunner.add('should support custom flatten separator using the flatten transform', (t) => { + const opts = { + transforms: [flatten('__')], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.deepJSON().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.flattenedCustomSeparatorDeepJSON); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + testRunner.add('should support multiple transforms and honor the order in which they are declared', (t) => { + const opts = { + transforms: [unwind(['items']), flatten()], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.unwindAndFlatten().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.unwindAndFlatten); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); + + testRunner.add('should support custom transforms', async 
(t) => { + const opts = { + transforms: [row => ({ + model: row.carModel, + price: row.price / 1000, + color: row.color, + transmission: row.transmission || 'automatic', + })], + }; + + const transform = new Json2csvTransform(opts); + const processor = jsonFixtures.default().pipe(transform); + + let csv = ''; + processor + .on('data', chunk => (csv += chunk.toString())) + .on('end', () => { + t.equal(csv, csvFixtures.defaultCustomTransform); + t.end(); + }) + .on('error', err => { + t.fail(err.message); + t.end(); + }); + }); }; diff --git a/test/fixtures/csv/defaultCustomTransform.csv b/test/fixtures/csv/defaultCustomTransform.csv new file mode 100644 index 00000000..23da3c96 --- /dev/null +++ b/test/fixtures/csv/defaultCustomTransform.csv @@ -0,0 +1,5 @@ +"model","price","color","transmission" +"Audi",0,"blue","automatic" +"BMW",15,"red","manual" +"Mercedes",20,"yellow","automatic" +"Porsche",30,"green","automatic" \ No newline at end of file