diff --git a/src/compile/data/parse.ts b/src/compile/data/parse.ts
index c730992a86..0a80feb442 100644
--- a/src/compile/data/parse.ts
+++ b/src/compile/data/parse.ts
@@ -1,6 +1,6 @@
 import {MAIN, RAW} from '../../data';
 import * as log from '../../log';
-import {isAggregate, isBin, isCalculate, isFilter, isLookup, isTimeUnit, isWindow} from '../../transform';
+import {isAggregate, isBin, isCalculate, isFilter, isLookup, isStack, isTimeUnit, isWindow} from '../../transform';
 import {Dict, keys} from '../../util';
 import {isFacetModel, isLayerModel, isUnitModel, Model} from '../model';
 import {requiresSelectionId} from '../selection/selection';
@@ -86,6 +86,12 @@ export function parseTransformArray(head: DataFlowNode, model: Model, ancestorPa
       for (const field of keys(window.producedFields())) {
         ancestorParse.set(field, 'derived', false);
       }
+    } else if (isStack(t)) {
+      const stack = head = StackNode.makeFromTransform(head, t);
+
+      for (const field of keys(stack.producedFields())) {
+        ancestorParse.set(field, 'derived', false);
+      }
     } else {
       log.warn(log.message.invalidTransformIgnored(t));
       return;
@@ -219,7 +225,7 @@ export function parseData(model: Model): DataComponent {
       }
     }
 
-    head = StackNode.make(head, model) || head;
+    head = StackNode.makeFromEncoding(head, model) || head;
   }
 
   if (isUnitModel(model)) {
diff --git a/src/compile/data/stack.ts b/src/compile/data/stack.ts
index 001ca95605..5445df76c8 100644
--- a/src/compile/data/stack.ts
+++ b/src/compile/data/stack.ts
@@ -1,13 +1,13 @@
-import {isArray} from 'vega-util';
+import {isArray, isString} from 'vega-util';
 import {FieldDef, isFieldDef, vgField} from '../../fielddef';
 import {StackOffset} from '../../stack';
+import {StackTransform} from '../../transform';
 import {duplicate} from '../../util';
-import {VgSort, VgTransform} from '../../vega.schema';
+import {VgComparatorOrder, VgSort, VgTransform} from '../../vega.schema';
 import {sortParams} from '../common';
 import {UnitModel} from './../unit';
 import {DataFlowNode} from './dataflow';
-
 
 function getStackByFields(model: UnitModel): string[] {
   return model.stack.stackBy.reduce((fields, by) => {
     const fieldDef = by.fieldDef;
@@ -21,35 +21,53 @@
 }
 
 export interface StackComponent {
   /**
    * Faceted field.
    */
   facetby: string[];
 
-  dimensionFieldDef: FieldDef;
+  dimensionFieldDef?: FieldDef;
 
   /**
-   * Stack measure's field
+   * Stack measure's field. Used in makeFromEncoding.
    */
-  field: string;
+  stackField: string;
 
   /**
    * Level of detail fields for each level in the stacked charts such as color or detail.
+   * Used in makeFromEncoding.
    */
-  stackby: string[];
+  stackby?: string[];
 
   /**
    * Field that determines order of levels in the stacked charts.
+   * Used by both makeFromEncoding and makeFromTransform, but optional in the latter.
    */
   sort: VgSort;
 
-  /** Mode for stacking marks. */
+  /**
+   * Mode for stacking marks.
+   */
   offset: StackOffset;
 
   /**
-   * Whether to impute the data before stacking.
+   * Whether to impute the data before stacking. Used only in makeFromEncoding.
+   */
+  impute?: boolean;
+
+  /**
+   * The data fields to group by.
+   */
+  groupby?: string[];
+
+  /**
+   * Output field names of each stack field.
+   */
+  as: string[];
 }
 
+function isValidAsArray(as: string[] | string): as is string[] {
+  return isArray(as) && as.every(s => isString(s)) && as.length > 1;
+}
+
 export class StackNode extends DataFlowNode {
@@ -65,7 +83,42 @@ export class StackNode extends DataFlowNode {
     this._stack = stack;
   }
 
-  public static make(parent: DataFlowNode, model: UnitModel) {
+  public static makeFromTransform(parent: DataFlowNode, stackTransform: StackTransform) {
+    const {stack, groupby, as, offset = 'zero'} = stackTransform;
+
+    const sortFields: string[] = [];
+    const sortOrder: VgComparatorOrder[] = [];
+    if (stackTransform.sort !== undefined) {
+      for (const sortField of stackTransform.sort) {
+        sortFields.push(sortField.field);
+        sortOrder.push(sortField.order === undefined ? 'ascending' : sortField.order as VgComparatorOrder);
+      }
+    }
+    const sort: VgSort = {
+      field: sortFields,
+      order: sortOrder,
+    };
+
+    let normalizedAs: Array<string>;
+    if (isValidAsArray(as)) {
+      normalizedAs = as;
+    } else if (isString(as)) {
+      normalizedAs = [as, as + '_end'];
+    } else {
+      normalizedAs = [stackTransform.stack + '_start', stackTransform.stack + '_end'];
+    }
+
+    return new StackNode(parent, {
+      stackField: stack,
+      groupby,
+      offset,
+      sort,
+      facetby: [],
+      as: normalizedAs
+    });
+  }
+
+  public static makeFromEncoding(parent: DataFlowNode, model: UnitModel) {
 
     const stackProperties = model.stack;
@@ -93,15 +146,19 @@ export class StackNode extends DataFlowNode {
         return s;
       }, {field:[], order: []});
     }
+    // Set "as" in the make phase so that producedFields() can be derived
+    // from the as property.
+    const field = model.vgField(stackProperties.fieldChannel);
 
     return new StackNode(parent, {
       dimensionFieldDef,
-      field: model.vgField(stackProperties.fieldChannel),
+      stackField: field,
       facetby: [],
       stackby,
       sort,
      offset: stackProperties.offset,
       impute: stackProperties.impute,
+      as: [field + '_start', field + '_end']
     });
   }
@@ -116,7 +173,7 @@
   public dependentFields() {
     const out = {};
 
-    out[this._stack.field] = true;
+    out[this._stack.stackField] = true;
 
     this.getGroupbyFields().forEach(f => out[f] = true);
     this._stack.facetby.forEach(f => out[f] = true);
@@ -127,16 +184,14 @@
   }
 
   public producedFields() {
-    const out = {};
-
-    out[this._stack.field + '_start'] = true;
-    out[this._stack.field + '_end'] = true;
-
-    return out;
+    return this._stack.as.reduce((result, item) => {
+      result[item] = true;
+      return result;
+    }, {});
   }
 
   private getGroupbyFields() {
-    const {dimensionFieldDef, impute} = this._stack;
+    const {dimensionFieldDef, impute, groupby} = this._stack;
     if (dimensionFieldDef) {
       if (dimensionFieldDef.bin) {
         if (impute) {
@@ -152,15 +207,14 @@
       }
       return [vgField(dimensionFieldDef)];
     }
-    return [];
+    return groupby || [];
   }
 
   public assemble(): VgTransform[] {
     const transform: VgTransform[] = [];
+    const {facetby, dimensionFieldDef, stackField: field, stackby, sort, offset, impute, as} = this._stack;
 
-    const {facetby, field: stackField, dimensionFieldDef, impute, offset, sort, stackby} = this._stack;
-
     // Impute
     if (impute && dimensionFieldDef) {
       const dimensionField = dimensionFieldDef ? vgField(dimensionFieldDef, {binSuffix: 'mid'}): undefined;
@@ -180,7 +234,7 @@
 
       transform.push({
         type: 'impute',
-        field: stackField,
+        field,
        groupby: stackby,
         key: dimensionField,
         method: 'value',
@@ -192,12 +246,9 @@
 
     transform.push({
       type: 'stack',
       groupby: this.getGroupbyFields().concat(facetby),
-      field: stackField,
+      field,
       sort,
-      as: [
-        stackField + '_start',
-        stackField + '_end'
-      ],
+      as,
       offset
     });
 
diff --git a/src/transform.ts b/src/transform.ts
index a33c3da617..4fd3c12296 100644
--- a/src/transform.ts
+++ b/src/transform.ts
@@ -103,6 +103,38 @@ export interface AggregatedFieldDef {
 }
 
+/**
+ * @hide
+ */
+export interface StackTransform {
+  /**
+   * The field which is stacked.
+   */
+  stack: string;
+  /**
+   * The data fields to group by.
+   */
+  groupby: string[];
+  /**
+   * Mode for stacking marks.
+   * __Default value:__ `"zero"`
+   */
+  offset?: 'zero' | 'center' | 'normalize';
+  /**
+   * Field that determines the order of leaves in the stacked charts.
+   */
+  sort?: SortField[];
+  /**
+   * Output field names. This can be either a string or an array of two strings,
+   * denoting the names of the fields for the stack start and stack end, respectively.
+   * If a single string (e.g., "val") is provided, the end field will be "val_end".
+   */
+  as: string | string[];
+}
+
 
 export type WindowOnlyOp =
   'row_number' |
   'rank' |
@@ -239,7 +271,11 @@ export function isAggregate(t: Transform): t is AggregateTransform {
   return t['aggregate'] !== undefined;
 }
 
-export type Transform = FilterTransform | CalculateTransform | LookupTransform | BinTransform | TimeUnitTransform | AggregateTransform | WindowTransform;
+export function isStack(t: Transform): t is StackTransform {
+  return t['stack'] !== undefined;
+}
+
+export type Transform = FilterTransform | CalculateTransform | LookupTransform | BinTransform | TimeUnitTransform | AggregateTransform | WindowTransform | StackTransform;
 
 export function normalizeTransform(transform: Transform[]) {
   return transform.map(t => {
diff --git a/test/compile/data/stack.test.ts b/test/compile/data/stack.test.ts
index aee5fe1314..3189f938c6 100644
--- a/test/compile/data/stack.test.ts
+++ b/test/compile/data/stack.test.ts
@@ -3,91 +3,95 @@
 import {assert} from 'chai';
 
 import {StackComponent, StackNode} from '../../../src/compile/data/stack';
-
 import {UnitModel} from '../../../src/compile/unit';
-import {VgTransform} from '../../../src/vega.schema';
+import {Transform} from '../../../src/transform';
+import {VgComparatorOrder, VgSort, VgTransform} from '../../../src/vega.schema';
 import {parseUnitModelWithScale} from '../../util';
 
 function parse(model: UnitModel) {
-  return StackNode.make(null, model).stack;
+  return StackNode.makeFromEncoding(null, model).stack;
 }
 
 function assemble(model: UnitModel) {
-  return StackNode.make(null, model).assemble();
+  return StackNode.makeFromEncoding(null, model).assemble();
 }
 
+describe('compile/data/stack', () => {
-describe('compile/data/stack', () => {
-  it('should produce correct stack component for bar with color', () => {
-    const model = parseUnitModelWithScale({
-      "mark": "bar",
-      "encoding": {
-        "x": {"aggregate": "sum", "field": "a", "type": "quantitative"},
-        "y": {"field": "b", "type": "nominal"},
-        "color": {"field": "c", "type": "ordinal",}
-      }
-    });
+
+  describe('StackNode.makeFromEncoding', () => {
+    it('should produce correct stack component for bar with color', () => {
+      const model = parseUnitModelWithScale({
"mark": "bar", + "encoding": { + "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, + "y": {"field": "b", "type": "nominal"}, + "color": {"field": "c", "type": "ordinal",} + } + }); - assert.deepEqual(parse(model), { - dimensionFieldDef: {field: 'b', type: 'nominal'}, - facetby: [], - field: 'sum_a', - stackby: ['c'], - sort: { - field: ['c'], - order: ['descending'] - }, - offset: 'zero', - impute: false + assert.deepEqual(parse(model), { + dimensionFieldDef: {field: 'b', type: 'nominal'}, + facetby: [], + stackField: 'sum_a', + stackby: ['c'], + sort: { + field: ['c'], + order: ['descending'] + }, + offset: 'zero', + impute: false, + as: ['sum_a_start', 'sum_a_end'] + }); }); - }); - it('should produce correct stack component with both start and end of the binned field for bar with color and binned y', () => { - const model = parseUnitModelWithScale({ - "mark": "bar", - "encoding": { - "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, - "y": {"bin": true, "field": "b", "type": "quantitative"}, - "color": {"field": "c", "type": "ordinal",} - } - }); + it('should produce correct stack component with both start and end of the binned field for bar with color and binned y', () => { + const model = parseUnitModelWithScale({ + "mark": "bar", + "encoding": { + "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, + "y": {"bin": true, "field": "b", "type": "quantitative"}, + "color": {"field": "c", "type": "ordinal",} + } + }); - assert.deepEqual(parse(model), { - dimensionFieldDef: {"bin": {maxbins: 10}, "field": "b", "type": "quantitative"}, - facetby: [], - field: 'sum_a', - stackby: ['c'], - sort: { - field: ['c'], - order: ['descending'] - }, - offset: 'zero', - impute: false + assert.deepEqual(parse(model), { + dimensionFieldDef: {"bin": {maxbins: 10}, "field": "b", "type": "quantitative"}, + facetby: [], + stackField: 'sum_a', + stackby: ['c'], + sort: { + field: ['c'], + order: ['descending'] + }, + offset: 'zero', + impute: false, + as: ['sum_a_start', 'sum_a_end'] + }); }); - }); - it('should produce correct stack component for 1D bar with color', () => { - const model = parseUnitModelWithScale({ - "mark": "bar", - "encoding": { - "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, - "color": {"field": "c", "type": "ordinal",} - } - }); + it('should produce correct stack component for 1D bar with color', () => { + const model = parseUnitModelWithScale({ + "mark": "bar", + "encoding": { + "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, + "color": {"field": "c", "type": "ordinal",} + } + }); - assert.deepEqual(parse(model), { - dimensionFieldDef: undefined, - facetby: [], - field: 'sum_a', - stackby: ['c'], - sort: { - field: ['c'], - order: ['descending'] - }, - offset: 'zero', - impute: false - }); + assert.deepEqual(parse(model), { + dimensionFieldDef: undefined, + facetby: [], + stackField: 'sum_a', + stackby: ['c'], + sort: { + field: ['c'], + order: ['descending'] + }, + offset: 'zero', + impute: false, + as: ['sum_a_start', 'sum_a_end'] + }); - assert.deepEqual(assemble(model), [{ + assert.deepEqual(assemble(model), [{ type: 'stack', groupby: [], field: 'sum_a', @@ -98,104 +102,217 @@ describe('compile/data/stack', () => { as: ['sum_a_start', 'sum_a_end'], offset: 'zero' } - ]); - }); - - it('should produce correct stack component for area with color and order', function() { - const model = parseUnitModelWithScale({ - "mark": "area", - "encoding": { - "x": {"aggregate": "sum", "field": "a", "type": 
"quantitative"}, - "y": {"field": "b", "type": "nominal"}, - "color": {"field": "c", "type": "nominal"}, - "order": {"aggregate": "mean", "field": "d", "type": "quantitative"} - } + ]); }); - assert.deepEqual(parse(model), { - dimensionFieldDef: {field: 'b', type: 'nominal'}, - facetby: [], - field: 'sum_a', - stackby: ['c'], - sort: { - field: ['mean_d'], - order: ['ascending'] - }, - offset: 'zero', - impute: true - }); + it('should produce correct stack component for area with color and order', function() { + const model = parseUnitModelWithScale({ + "mark": "area", + "encoding": { + "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, + "y": {"field": "b", "type": "nominal"}, + "color": {"field": "c", "type": "nominal"}, + "order": {"aggregate": "mean", "field": "d", "type": "quantitative"} + } + }); - assert.deepEqual(assemble(model), [ - { - type: 'impute', - field: 'sum_a', - groupby: ['c'], - key: 'b', - method: "value", - value: 0 - }, - { - type: 'stack', - groupby: ['b'], - field: 'sum_a', + assert.deepEqual(parse(model), { + dimensionFieldDef: {field: 'b', type: 'nominal'}, + facetby: [], + stackField: 'sum_a', + stackby: ['c'], sort: { field: ['mean_d'], order: ['ascending'] }, - as: ['sum_a_start', 'sum_a_end'], - offset: 'zero' - } - ]); - }); + offset: 'zero', + impute: true, + as: ['sum_a_start', 'sum_a_end'] + }); - it('should produce correct stack component for area with color and binned dimension', function() { - const model = parseUnitModelWithScale({ - "mark": "area", - "encoding": { - "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, - "y": {"bin": true, "field": "b", "type": "quantitative"}, - "color": {"field": "c", "type": "nominal"} - } + assert.deepEqual(assemble(model), [ + { + type: 'impute', + field: 'sum_a', + groupby: ['c'], + key: 'b', + method: "value", + value: 0 + }, + { + type: 'stack', + groupby: ['b'], + field: 'sum_a', + sort: { + field: ['mean_d'], + order: ['ascending'] + }, + as: ['sum_a_start', 'sum_a_end'], + offset: 'zero' + } + ]); }); - assert.deepEqual(parse(model), { - dimensionFieldDef: {"bin": {maxbins: 10}, "field": "b", "type": "quantitative"}, - facetby: [], - field: 'sum_a', - stackby: ['c'], - sort: { - field: ['c'], - order: ['descending'] - }, - offset: 'zero', - impute: true - }); + it('should produce correct stack component for area with color and binned dimension', function() { + const model = parseUnitModelWithScale({ + "mark": "area", + "encoding": { + "x": {"aggregate": "sum", "field": "a", "type": "quantitative"}, + "y": {"bin": true, "field": "b", "type": "quantitative"}, + "color": {"field": "c", "type": "nominal"} + } + }); - assert.deepEqual(assemble(model), [ - { - type: 'formula', - expr: '(datum[\"bin_maxbins_10_b\"]+datum[\"bin_maxbins_10_b_end\"])/2', - as: 'bin_maxbins_10_b_mid' - }, - { - type: 'impute', - field: 'sum_a', - groupby: ['c'], - key: 'bin_maxbins_10_b_mid', - method: "value", - value: 0 - }, - { - type: 'stack', - groupby: ['bin_maxbins_10_b_mid'], - field: 'sum_a', + assert.deepEqual(parse(model), { + dimensionFieldDef: {"bin": {maxbins: 10}, "field": "b", "type": "quantitative"}, + facetby: [], + stackField: 'sum_a', + stackby: ['c'], sort: { field: ['c'], order: ['descending'] }, - as: ['sum_a_start', 'sum_a_end'], - offset: 'zero' - } - ]); + offset: 'zero', + impute: true, + as: ['sum_a_start', 'sum_a_end'] + }); + + assert.deepEqual(assemble(model), [ + { + type: 'formula', + expr: '(datum[\"bin_maxbins_10_b\"]+datum[\"bin_maxbins_10_b_end\"])/2', + as: 
+          as: 'bin_maxbins_10_b_mid'
+        },
+        {
+          type: 'impute',
+          field: 'sum_a',
+          groupby: ['c'],
+          key: 'bin_maxbins_10_b_mid',
+          method: "value",
+          value: 0
+        },
+        {
+          type: 'stack',
+          groupby: ['bin_maxbins_10_b_mid'],
+          field: 'sum_a',
+          sort: {
+            field: ['c'],
+            order: ['descending']
+          },
+          as: ['sum_a_start', 'sum_a_end'],
+          offset: 'zero'
+        }
+      ]);
+    });
+  });
+
+  describe('StackNode.makeFromTransform', () => {
+    it('should fill in offset and sort properly', () => {
+      const transform: Transform = {
+        stack: 'people',
+        groupby: ['age'],
+        as: ['v1', 'v2']
+      };
+      const stack = StackNode.makeFromTransform(null, transform);
+      assert.deepEqual(stack.assemble(), [{
+        type: 'stack',
+        groupby: ['age'],
+        field: 'people',
+        offset: 'zero',
+        sort: {field: [] as string[], order: [] as VgComparatorOrder[]} as VgSort,
+        as: ['v1', 'v2']
+      }]);
+    });
+
+    it('should fill in partial "as" field properly', () => {
+      const transform: Transform = {
+        stack: 'people',
+        groupby: ['age', 'gender'],
+        offset: 'normalize',
+        as: "val"
+      };
+      const stack = StackNode.makeFromTransform(null, transform);
+      assert.deepEqual(stack.assemble(), [{
+        type: 'stack',
+        groupby: ['age', 'gender'],
+        field: 'people',
+        offset: 'normalize',
+        sort: {field: [] as string[], order: [] as VgComparatorOrder[]} as VgSort,
+        as: ["val", "val_end"]
+      }]);
+    });
+
+    it('should handle complete "sort"', () => {
+      const transform: Transform = {
+        stack: 'people',
+        groupby: ['age', 'gender'],
+        offset: 'normalize',
+        sort: [{'field': 'height', 'order': 'ascending'},
+               {'field': 'weight', 'order': 'descending'}],
+        as: 'val'
+      };
+      const stack = StackNode.makeFromTransform(null, transform);
+      assert.deepEqual(stack.assemble(), [{
+        type: 'stack',
+        groupby: ['age', 'gender'],
+        field: 'people',
+        offset: 'normalize',
+        sort: {field: ['height', 'weight'], order: ['ascending', 'descending']},
+        as: ["val", "val_end"]
+      }]);
+    });
+
+    it('should handle incomplete "sort" field', () => {
+      const transform: Transform = {
+        stack: 'people',
+        groupby: ['age', 'gender'],
+        offset: 'normalize',
+        sort: [{'field': 'height'}],
+        as: 'val'
+      };
+      const stack = StackNode.makeFromTransform(null, transform);
+
+      assert.deepEqual(stack.assemble(), [{
+        type: 'stack',
+        groupby: ['age', 'gender'],
+        field: 'people',
+        offset: 'normalize',
+        sort: {field: ['height'], order: ['ascending']},
+        as: ["val", "val_end"]
+      }]);
+    });
+  });
+
+  describe('StackNode.producedFields', () => {
+    it('should give producedFields correctly', () => {
+      const transform: Transform = {
+        stack: 'people',
+        groupby: ['age'],
+        as: 'people'
+      };
+      const stack = StackNode.makeFromTransform(null, transform);
+      assert.deepEqual(stack.producedFields(), {
+        people: true,
+        people_end: true
+      });
+    });
+
+    it('should give producedFields correctly when in encoding channel', () => {
+      const model = parseUnitModelWithScale({
+        "mark": "bar",
+        "encoding": {
+          "x": {"aggregate": "sum", "field": "a", "type": "quantitative"},
+          "y": {"field": "b", "type": "nominal"},
+          "color": {"field": "c", "type": "ordinal",}
+        }
+      });
+      const stack = StackNode.makeFromEncoding(null, model);
+      assert.deepEqual(stack.producedFields(), {
+        sum_a_start: true,
+        sum_a_end: true
+      });
+    });
   });
 });
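
Note: for context, here is a minimal sketch of how the stack transform described by the StackTransform interface above might be written in a Vega-Lite unit spec. The field names reuse the people/age/gender fixtures from the tests; the data URL and the x/x2/y/color encodings are illustrative assumptions, not part of this diff.

// Hypothetical usage of the stack transform added in this diff (illustrative only).
const spec = {
  data: {url: 'data/population.json'},
  transform: [{
    stack: 'people',                                // field to stack
    groupby: ['age'],                               // fields to group by
    offset: 'normalize',                            // 'zero' (default) | 'center' | 'normalize'
    sort: [{field: 'gender', order: 'ascending'}],  // order of leaves within each stack
    as: ['v1', 'v2']                                // or a single string, e.g. 'val' -> ['val', 'val_end']
  }],
  mark: 'bar',
  encoding: {
    y: {field: 'age', type: 'ordinal'},
    x: {field: 'v1', type: 'quantitative'},
    x2: {field: 'v2'},
    color: {field: 'gender', type: 'nominal'}
  }
};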