Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Stack Transform #3771

Merged
merged 19 commits into from
May 22, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions src/compile/data/parse.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import {MAIN, RAW} from '../../data';
import * as log from '../../log';
import {isAggregate, isBin, isCalculate, isFilter, isLookup, isTimeUnit, isWindow} from '../../transform';
import {isAggregate, isBin, isCalculate, isFilter, isLookup, isStack, isTimeUnit, isWindow} from '../../transform';
import {Dict, keys} from '../../util';
import {isFacetModel, isLayerModel, isUnitModel, Model} from '../model';
import {requiresSelectionId} from '../selection/selection';
Expand Down Expand Up @@ -86,6 +86,12 @@ export function parseTransformArray(head: DataFlowNode, model: Model, ancestorPa
for (const field of keys(window.producedFields())) {
ancestorParse.set(field, 'derived', false);
}
} else if (isStack(t)) {
const stack = head = StackNode.makeFromTransform(head, t);

for (const field of keys(stack.producedFields())) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This code is called repeatedly through this method. In a separate PR, we should refactor this part #3772.

ancestorParse.set(field, 'derived', false);
}
} else {
log.warn(log.message.invalidTransformIgnored(t));
return;
Expand Down Expand Up @@ -219,7 +225,7 @@ export function parseData(model: Model): DataComponent {
}
}

head = StackNode.make(head, model) || head;
head = StackNode.makeFromEncoding(head, model) || head;
}

if (isUnitModel(model)) {
Expand Down
111 changes: 81 additions & 30 deletions src/compile/data/stack.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import {isArray} from 'vega-util';
import {isArray, isString} from 'vega-util';
import {FieldDef, isFieldDef, vgField} from '../../fielddef';
import {StackOffset} from '../../stack';
import {StackTransform} from '../../transform';
import {duplicate} from '../../util';
import {VgSort, VgTransform} from '../../vega.schema';
import {VgComparatorOrder, VgSort, VgTransform} from '../../vega.schema';
import {sortParams} from '../common';
import {UnitModel} from './../unit';
import {DataFlowNode} from './dataflow';


function getStackByFields(model: UnitModel): string[] {
return model.stack.stackBy.reduce((fields, by) => {
const fieldDef = by.fieldDef;
Expand All @@ -21,35 +21,53 @@ function getStackByFields(model: UnitModel): string[] {
}

export interface StackComponent {

/**
* Faceted field.
*/
facetby: string[];

dimensionFieldDef: FieldDef<string>;
dimensionFieldDef?: FieldDef<string>;

/**
* Stack measure's field
* Stack measure's field. Used in makeFromEncoding.
*/
field: string;
stackField: string;

/**
* Level of detail fields for each level in the stacked charts such as color or detail.
* Used in makeFromEncoding.
*/
stackby: string[];
stackby?: string[];

/**
* Field that determines order of levels in the stacked charts.
* Used in both but optional in transform.
*/
sort: VgSort;

/** Mode for stacking marks. */
/** Mode for stacking marks.
*/
offset: StackOffset;

/**
* Whether to impute the data before stacking.
* Whether to impute the data before stacking. Used only in makeFromEncoding.
*/
impute?: boolean;

/**
* The data fields to group by.
*/
groupby?: string[];
/**
* Output field names of each stack field.
*/
impute: boolean;
as: string[];

}

/**
 * Type guard for the `as` property of a stack transform: returns true only
 * when `as` is an array of at least two strings (names for the stack start
 * and stack end output fields).
 *
 * Uses the native `Array.isArray` / `typeof` checks directly — the vega-util
 * `isArray`/`isString` helpers are thin aliases of exactly these checks.
 */
function isValidAsArray(as: string[] | string): as is string[] {
  return Array.isArray(as) && as.length > 1 && as.every(s => typeof s === 'string');
}

export class StackNode extends DataFlowNode {
Expand All @@ -65,7 +83,42 @@ export class StackNode extends DataFlowNode {
this._stack = stack;
}

public static make(parent: DataFlowNode, model: UnitModel) {
/**
 * Build a StackNode from an explicit `stack` transform specification.
 *
 * @param parent The upstream dataflow node this stack node attaches to.
 * @param stackTransform The user-supplied stack transform (`stack`, `groupby`,
 *   optional `sort`, optional `offset` defaulting to `'zero'`, and `as`).
 */
public static makeFromTransform(parent: DataFlowNode, stackTransform: StackTransform) {
  const {stack, groupby, as, offset = 'zero'} = stackTransform;

  // Flatten the sort field definitions into the parallel field/order arrays
  // that VgSort expects, defaulting each missing order to 'ascending'.
  const sortFields: string[] = [];
  const sortOrder: VgComparatorOrder[] = [];
  if (stackTransform.sort !== undefined) {
    for (const sortField of stackTransform.sort) {
      sortFields.push(sortField.field);
      sortOrder.push((sortField.order || 'ascending') as VgComparatorOrder);
    }
  }
  const sort: VgSort = {
    field: sortFields,
    order: sortOrder
  };

  // Normalize `as` to a two-element [start, end] pair of output field names:
  // a valid pair is used as-is, a single string "f" becomes ["f", "f_end"],
  // and anything else falls back to ["<stack>_start", "<stack>_end"].
  let normalizedAs: string[];
  if (isValidAsArray(as)) {
    normalizedAs = as;
  } else if (isString(as)) {
    normalizedAs = [as, as + '_end'];
  } else {
    normalizedAs = [stack + '_start', stack + '_end'];
  }

  return new StackNode(parent, {
    stackField: stack,
    groupby,
    offset,
    sort,
    facetby: [],
    as: normalizedAs
  });
}
public static makeFromEncoding(parent: DataFlowNode, model: UnitModel) {

const stackProperties = model.stack;

Expand Down Expand Up @@ -93,15 +146,19 @@ export class StackNode extends DataFlowNode {
return s;
}, {field:[], order: []});
}
// Refactored to add "as" in the make phase so that we can get producedFields
// from the as property
const field = model.vgField(stackProperties.fieldChannel);

return new StackNode(parent, {
dimensionFieldDef,
field: model.vgField(stackProperties.fieldChannel),
stackField:field,
facetby: [],
stackby,
sort,
offset: stackProperties.offset,
impute: stackProperties.impute,
as: [field + '_start', field + '_end']
});
}

Expand All @@ -116,7 +173,7 @@ export class StackNode extends DataFlowNode {
public dependentFields() {
const out = {};

out[this._stack.field] = true;
out[this._stack.stackField] = true;

this.getGroupbyFields().forEach(f => out[f] = true);
this._stack.facetby.forEach(f => out[f] = true);
Expand All @@ -127,16 +184,14 @@ export class StackNode extends DataFlowNode {
}

public producedFields() {
const out = {};

out[this._stack.field + '_start'] = true;
out[this._stack.field + '_end'] = true;

return out;
return this._stack.as.reduce((result, item) => {
result[item] = true;
return result;
}, {});
}

private getGroupbyFields() {
const {dimensionFieldDef, impute} = this._stack;
const {dimensionFieldDef, impute, groupby} = this._stack;
if (dimensionFieldDef) {
if (dimensionFieldDef.bin) {
if (impute) {
Expand All @@ -152,15 +207,14 @@ export class StackNode extends DataFlowNode {
}
return [vgField(dimensionFieldDef)];
}
return [];
return groupby || [];
Copy link
Member

@kanitw kanitw May 22, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm ok with this for now.

Once you decouple Impute from stacking (#1514), we should make dimensionFieldDef no longer a part of StackComponent and just have StackNode.makeFromEncoding() create a groupby, using this method. At that point, we can basically remove facetBy and just have push new dimension in the facet to groupby instead.

(Basically, if you look at the code, the final groupby in the stack transform = groupBy that's from dimension fieldDef + facetBy)

}

public assemble(): VgTransform[] {
const transform: VgTransform[] = [];
const {facetby, dimensionFieldDef, stackField: field, stackby, sort, offset, impute, as} = this._stack;

const {facetby, field: stackField, dimensionFieldDef, impute, offset, sort, stackby} = this._stack;

// Impute
// Impute
if (impute && dimensionFieldDef) {
const dimensionField = dimensionFieldDef ? vgField(dimensionFieldDef, {binSuffix: 'mid'}): undefined;

Expand All @@ -180,7 +234,7 @@ export class StackNode extends DataFlowNode {

transform.push({
type: 'impute',
field: stackField,
field,
groupby: stackby,
key: dimensionField,
method: 'value',
Expand All @@ -192,12 +246,9 @@ export class StackNode extends DataFlowNode {
transform.push({
type: 'stack',
groupby: this.getGroupbyFields().concat(facetby),
field: stackField,
field,
sort,
as: [
stackField + '_start',
stackField + '_end'
],
as,
offset
});

Expand Down
38 changes: 37 additions & 1 deletion src/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,38 @@ export interface AggregatedFieldDef {
}


/**
* @hide
*/
export interface StackTransform {
/**
* The field which is stacked.
*/
stack: string;
/**
* The data fields to group by.
*/
groupby: string[];
/**
* Mode for stacking marks.
* __Default value:__ `"zero"`
*/
offset?: 'zero' | 'center' | 'normalize';
/**
* Field that determines the order of leaves in the stacked charts.
*/
sort?: SortField[];
/**
* Output field names. This can be either a string or an array of strings with
* two elements denoting the name for the fields for stack start and stack end
* respectively.
* If a single string (e.g., `"val"`) is provided, the end field will be `"val_end"`.
*/
as: string | string[];

}


export type WindowOnlyOp =
'row_number' |
'rank' |
Expand Down Expand Up @@ -239,7 +271,11 @@ export function isAggregate(t: Transform): t is AggregateTransform {
return t['aggregate'] !== undefined;
}

export type Transform = FilterTransform | CalculateTransform | LookupTransform | BinTransform | TimeUnitTransform | AggregateTransform | WindowTransform;
// Type guard: a transform is a StackTransform iff it has a `stack` property.
// Bracket access is used because `stack` is not declared on every member of
// the Transform union.
export function isStack(t: Transform): t is StackTransform {
return t['stack'] !== undefined;
}

export type Transform = FilterTransform | CalculateTransform | LookupTransform | BinTransform | TimeUnitTransform | AggregateTransform | WindowTransform | StackTransform;

export function normalizeTransform(transform: Transform[]) {
return transform.map(t => {
Expand Down
Loading