diff --git a/build/vega-lite-schema.json b/build/vega-lite-schema.json
index 02c23e5b0f..60274992fb 100644
--- a/build/vega-lite-schema.json
+++ b/build/vega-lite-schema.json
@@ -12,6 +12,19 @@
},
{
"$ref": "#/definitions/ArgminDef"
+ },
+ {
+ "$ref": "#/definitions/ExponentialDef"
+ }
+ ]
+ },
+ "AggregateFieldOp": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/NonArgAggregateFieldOp"
+ },
+ {
+ "$ref": "#/definitions/ExponentialDef"
}
]
},
@@ -80,7 +93,7 @@
"description": "The data field for which to compute aggregate function. This is required for all aggregation operations except `\"count\"`."
},
"op": {
- "$ref": "#/definitions/AggregateOp",
+ "$ref": "#/definitions/AggregateFieldOp",
"description": "The aggregation operation to apply to the fields (e.g., `\"sum\"`, `\"average\"`, or `\"count\"`). See the [full list of supported aggregation operations](https://vega.github.io/vega-lite/docs/aggregate.html#ops) for more information."
}
},
@@ -8797,6 +8810,18 @@
],
"type": "string"
},
+ "ExponentialDef": {
+ "additionalProperties": false,
+ "properties": {
+ "exponential": {
+ "type": "number"
+ }
+ },
+ "required": [
+ "exponential"
+ ],
+ "type": "object"
+ },
"Expr": {
"type": "string"
},
@@ -17862,6 +17887,35 @@
],
"type": "object"
},
+ "NonArgAggregateFieldOp": {
+ "enum": [
+ "argmax",
+ "argmin",
+ "average",
+ "count",
+ "distinct",
+ "max",
+ "mean",
+ "median",
+ "min",
+ "missing",
+ "product",
+ "q1",
+ "q3",
+ "ci0",
+ "ci1",
+ "stderr",
+ "stdev",
+ "stdevp",
+ "sum",
+ "valid",
+ "values",
+ "variance",
+ "variancep",
+ "exponentialb"
+ ],
+ "type": "string"
+ },
"NonArgAggregateOp": {
"enum": [
"average",
@@ -17885,7 +17939,6 @@
"values",
"variance",
"variancep",
- "exponential",
"exponentialb"
],
"type": "string"
diff --git a/examples/compiled/layer_line_exponential.png b/examples/compiled/layer_line_exponential.png
new file mode 100644
index 0000000000..fbe062cda4
Binary files /dev/null and b/examples/compiled/layer_line_exponential.png differ
diff --git a/examples/compiled/layer_line_exponential.svg b/examples/compiled/layer_line_exponential.svg
new file mode 100644
index 0000000000..94f8fa7421
--- /dev/null
+++ b/examples/compiled/layer_line_exponential.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/examples/compiled/layer_line_exponential.vg.json b/examples/compiled/layer_line_exponential.vg.json
new file mode 100644
index 0000000000..b2b1360c19
--- /dev/null
+++ b/examples/compiled/layer_line_exponential.vg.json
@@ -0,0 +1,137 @@
+{
+ "$schema": "https://vega.github.io/schema/vega/v5.json",
+ "background": "white",
+ "padding": 5,
+ "width": 400,
+ "height": 200,
+ "style": "cell",
+ "data": [
+ {
+ "name": "source_0",
+ "values": [
+ {"price": 9.2, "year": 2020},
+ {"price": 10.76, "year": 2020},
+ {"price": 36.88, "year": 2021},
+ {"price": 3.44, "year": 2021},
+ {"price": 10.55, "year": 2022},
+ {"price": 9.65, "year": 2022},
+ {"price": 7.15, "year": 2023},
+ {"price": 15, "year": 2023},
+ {"price": 10.19, "year": 2024},
+ {"price": 8.86, "year": 2024}
+ ]
+ },
+ {
+ "name": "data_0",
+ "source": "source_0",
+ "transform": [
+ {
+ "type": "aggregate",
+ "groupby": ["year"],
+ "ops": ["exponential", "mean"],
+ "fields": ["price", "price"],
+ "as": ["exponential_price", "mean_price"],
+ "aggregate_params": [0.5, null]
+ }
+ ]
+ }
+ ],
+ "marks": [
+ {
+ "name": "layer_0_marks",
+ "type": "line",
+ "style": ["line"],
+ "sort": {"field": "datum[\"year\"]"},
+ "from": {"data": "data_0"},
+ "encode": {
+ "update": {
+ "stroke": {"value": "#4c78a8"},
+ "description": {
+ "signal": "\"year: \" + (isValid(datum[\"year\"]) ? datum[\"year\"] : \"\"+datum[\"year\"]) + \"; Avg Price: \" + (format(datum[\"mean_price\"], \"\"))"
+ },
+ "x": {"scale": "x", "field": "year"},
+ "y": {"scale": "y", "field": "mean_price"},
+ "defined": {
+ "signal": "isValid(datum[\"mean_price\"]) && isFinite(+datum[\"mean_price\"])"
+ }
+ }
+ }
+ },
+ {
+ "name": "layer_1_marks",
+ "type": "line",
+ "style": ["line"],
+ "sort": {"field": "datum[\"year\"]"},
+ "from": {"data": "data_0"},
+ "encode": {
+ "update": {
+ "opacity": {"value": 0.5},
+ "stroke": {"value": "#4c78a8"},
+ "description": {
+ "signal": "\"year: \" + (isValid(datum[\"year\"]) ? datum[\"year\"] : \"\"+datum[\"year\"]) + \"; Exponential of price: \" + (format(datum[\"exponential_price\"], \"\"))"
+ },
+ "x": {"scale": "x", "field": "year"},
+ "y": {"scale": "y", "field": "exponential_price"},
+ "defined": {
+ "signal": "isValid(datum[\"exponential_price\"]) && isFinite(+datum[\"exponential_price\"])"
+ }
+ }
+ }
+ }
+ ],
+ "scales": [
+ {
+ "name": "x",
+ "type": "point",
+ "domain": {"data": "data_0", "field": "year", "sort": true},
+ "range": [0, {"signal": "width"}],
+ "padding": 0.5
+ },
+ {
+ "name": "y",
+ "type": "linear",
+ "domain": {
+ "data": "data_0",
+ "fields": ["mean_price", "exponential_price"]
+ },
+ "range": [{"signal": "height"}, 0],
+ "nice": true,
+ "zero": true
+ }
+ ],
+ "axes": [
+ {
+ "scale": "y",
+ "orient": "left",
+ "gridScale": "x",
+ "grid": true,
+ "tickCount": {"signal": "ceil(height/40)"},
+ "domain": false,
+ "labels": false,
+ "aria": false,
+ "maxExtent": 0,
+ "minExtent": 0,
+ "ticks": false,
+ "zindex": 0
+ },
+ {
+ "scale": "x",
+ "orient": "bottom",
+ "grid": false,
+ "title": "year",
+ "labelAlign": "right",
+ "labelAngle": 270,
+ "labelBaseline": "middle",
+ "zindex": 0
+ },
+ {
+ "scale": "y",
+ "orient": "left",
+ "grid": false,
+ "title": "Avg Price",
+ "labelOverlap": true,
+ "tickCount": {"signal": "ceil(height/40)"},
+ "zindex": 0
+ }
+ ]
+}
diff --git a/examples/specs/layer_line_exponential.vl.json b/examples/specs/layer_line_exponential.vl.json
new file mode 100644
index 0000000000..680b853cac
--- /dev/null
+++ b/examples/specs/layer_line_exponential.vl.json
@@ -0,0 +1,52 @@
+{
+ "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
+ "width": 400,
+ "data": {
+ "values": [
+ {"price": 9.2, "year": 2020},
+ {"price": 10.76, "year": 2020},
+ {"price": 36.88, "year": 2021},
+ {"price": 3.44, "year": 2021},
+ {"price": 10.55, "year": 2022},
+ {"price": 9.65, "year": 2022},
+ {"price": 7.15, "year": 2023},
+ {"price": 15.0, "year": 2023},
+ {"price": 10.19, "year": 2024},
+ {"price": 8.86, "year": 2024}
+ ]
+ },
+ "layer": [
+ {
+ "mark": "line",
+ "encoding": {
+ "x": {
+ "field": "year"
+ },
+ "y": {
+ "field": "price",
+ "aggregate": "mean",
+ "type": "quantitative",
+ "title": "Avg Price"
+ }
+ }
+ },
+ {
+ "mark": {
+ "type": "line",
+ "opacity": 0.5
+ },
+ "encoding": {
+ "x": {
+ "field": "year"
+ },
+ "y": {
+ "field": "price",
+ "aggregate": {
+ "exponential": 0.5
+ },
+ "type": "quantitative"
+ }
+ }
+ }
+ ]
+}
diff --git a/site/docs/transform/aggregate.md b/site/docs/transform/aggregate.md
index 1a86f11762..41bab5fb05 100644
--- a/site/docs/transform/aggregate.md
+++ b/site/docs/transform/aggregate.md
@@ -121,6 +121,7 @@ The supported **aggregation operations** are:
| max | The maximum field value. |
| argmin | An input data object containing the minimum field value. **Note:** When used inside encoding, `argmin` must be specified as an object. (See below for an example.) |
| argmax | An input data object containing the maximum field value. **Note:** When used inside encoding, `argmax` must be specified as an object. (See below for an example.) |
+| exponential | The exponential moving average of field values. **Note:** `exponential` must be specified as an object. (See below for an example.) |
{:#argmax}
@@ -141,3 +142,11 @@ This is equivalent to specifying argmax in an aggregate transform and encode its
`argmax` can be useful for getting the last value in a line for label placement.
+
+## Exponential
+
+You can use the exponential aggregate to get the exponential moving average of a field, which forms a smooth alternative to a simple moving average. It is commonly used when you want to more heavily weigh recent values, but don't want a discontinuous drop-off when numbers drop out of an averaging window.
+
+The exponential operation is specified as an object whose `exponential` property gives the weight (a number between 0 and 1) applied when computing the moving average.
+
+
diff --git a/src/aggregate.ts b/src/aggregate.ts
index 910db866f0..501192d694 100644
--- a/src/aggregate.ts
+++ b/src/aggregate.ts
@@ -45,9 +45,13 @@ export interface ArgmaxDef {
argmax: FieldName;
}
-export type NonArgAggregateOp = Exclude<AggregateOp, 'argmax' | 'argmin'>;
+export interface ExponentialDef {
+ exponential: number;
+}
+
+export type NonArgAggregateOp = Exclude<AggregateOp, 'argmax' | 'argmin' | 'exponential'>;
-export type Aggregate = NonArgAggregateOp | ArgmaxDef | ArgminDef;
+export type Aggregate = NonArgAggregateOp | ArgmaxDef | ArgminDef | ExponentialDef;
export function isArgminDef(a: Aggregate | string): a is ArgminDef {
return !!a && !!a['argmin'];
@@ -57,7 +61,11 @@ export function isArgmaxDef(a: Aggregate | string): a is ArgmaxDef {
return !!a && !!a['argmax'];
}
-export function isAggregateOp(a: string | ArgminDef | ArgmaxDef): a is AggregateOp {
+export function isExponentialDef(a: Aggregate | string): a is ExponentialDef {
+ return !!a && !!a['exponential'];
+}
+
+export function isAggregateOp(a: string | ArgminDef | ArgmaxDef | ExponentialDef): a is AggregateOp {
return isString(a) && !!AGGREGATE_OP_INDEX[a];
}
diff --git a/src/channeldef.ts b/src/channeldef.ts
index 0b8b96f0eb..5dc38f9815 100644
--- a/src/channeldef.ts
+++ b/src/channeldef.ts
@@ -1,6 +1,6 @@
import {Gradient, ScaleType, SignalRef, Text} from 'vega';
import {isArray, isBoolean, isNumber, isString} from 'vega-util';
-import {Aggregate, isAggregateOp, isArgmaxDef, isArgminDef, isCountingAggregateOp} from './aggregate';
+import {Aggregate, isAggregateOp, isArgmaxDef, isArgminDef, isCountingAggregateOp, isExponentialDef} from './aggregate';
import {Axis} from './axis';
import {autoMaxBins, Bin, BinParams, binToString, isBinned, isBinning} from './bin';
import {
@@ -805,7 +805,7 @@ export function vgField(
if (!opt.nofn) {
if (isOpFieldDef(fieldDef)) {
- fn = fieldDef.op;
+ fn = isExponentialDef(fieldDef.op) ? 'exponential' : fieldDef.op;
} else {
const {bin, aggregate, timeUnit} = fieldDef;
if (isBinning(bin)) {
@@ -819,7 +819,7 @@ export function vgField(
argAccessor = `["${field}"]`;
field = `argmin_${aggregate.argmin}`;
} else {
- fn = String(aggregate);
+ fn = isExponentialDef(aggregate) ? 'exponential' : String(aggregate);
}
} else if (timeUnit && !isBinnedTimeUnit(timeUnit)) {
fn = timeUnitToString(timeUnit);
@@ -893,7 +893,8 @@ export function verbalTitleFormatter(fieldDef: FieldDefBase, config: Con
} else if (isArgminDef(aggregate)) {
return `${field} for min ${aggregate.argmin}`;
} else {
- return `${titleCase(aggregate)} of ${field}`;
+ const aggregateOp = isExponentialDef(aggregate) ? 'exponential' : aggregate;
+ return `${titleCase(aggregateOp)} of ${field}`;
}
}
return field;
@@ -909,7 +910,9 @@ export function functionalTitleFormatter(fieldDef: FieldDefBase) {
const timeUnitParams = timeUnit && !isBinnedTimeUnit(timeUnit) ? normalizeTimeUnit(timeUnit) : undefined;
- const fn = aggregate || timeUnitParams?.unit || (timeUnitParams?.maxbins && 'timeunit') || (isBinning(bin) && 'bin');
+ const aggregateOp = isExponentialDef(aggregate) ? 'exponential' : aggregate;
+ const fn =
+ aggregateOp || timeUnitParams?.unit || (timeUnitParams?.maxbins && 'timeunit') || (isBinning(bin) && 'bin');
if (fn) {
return `${fn.toUpperCase()}(${field})`;
} else {
@@ -1136,7 +1139,14 @@ export function initFieldDef(
const fieldDef = {...fd};
// Drop invalid aggregate
- if (!compositeMark && aggregate && !isAggregateOp(aggregate) && !isArgmaxDef(aggregate) && !isArgminDef(aggregate)) {
+ if (
+ !compositeMark &&
+ aggregate &&
+ !isAggregateOp(aggregate) &&
+ !isArgmaxDef(aggregate) &&
+ !isArgminDef(aggregate) &&
+ !isExponentialDef(aggregate)
+ ) {
log.warn(log.message.invalidAggregate(aggregate));
delete fieldDef.aggregate;
}
diff --git a/src/compile/data/aggregate.ts b/src/compile/data/aggregate.ts
index a8a9cefba1..7e46f6c96f 100644
--- a/src/compile/data/aggregate.ts
+++ b/src/compile/data/aggregate.ts
@@ -1,5 +1,5 @@
import {AggregateOp, AggregateTransform as VgAggregateTransform} from 'vega';
-import {isArgmaxDef, isArgminDef} from '../../aggregate';
+import {isArgmaxDef, isArgminDef, isExponentialDef} from '../../aggregate';
import {
Channel,
getPositionChannelFromLatLong,
@@ -27,7 +27,7 @@ import {DataFlowNode} from './dataflow';
import {isRectBasedMark} from '../../mark';
import {OFFSETTED_RECT_END_SUFFIX, OFFSETTED_RECT_START_SUFFIX} from './timeunit';
-type Measures = Dict<Partial<Record<AggregateOp, Set<string>>>>;
+type Measures = Dict<Partial<Record<AggregateOp, {aliases: Set<string>; aggregateParam?: number}>>>;
function addDimension(dims: Set<string>, channel: Channel, fieldDef: FieldDef<string>, model: ModelWithField) {
const channelDef2 = isUnitModel(model) ? model.encoding[getSecondaryRangeChannel(channel)] : undefined;
@@ -71,7 +71,14 @@ function mergeMeasures(parentMeasures: Measures, childMeasures: Measures) {
for (const op of keys(ops)) {
if (field in parentMeasures) {
// add operator to existing measure field
- parentMeasures[field][op] = new Set([...(parentMeasures[field][op] ?? []), ...ops[op]]);
+ parentMeasures[field][op] = {
+ aliases: new Set([...(parentMeasures[field][op]?.aliases ?? []), ...ops[op].aliases])
+ };
+
+ const childAggregateParam = childMeasures[field][op].aggregateParam;
+ if (childAggregateParam) {
+ parentMeasures[field][op].aggregateParam = childAggregateParam;
+ }
} else {
parentMeasures[field] = {[op]: ops[op]};
}
@@ -121,23 +128,28 @@ export class AggregateNode extends DataFlowNode {
if (aggregate) {
if (aggregate === 'count') {
meas['*'] ??= {};
- meas['*']['count'] = new Set([vgField(fieldDef, {forAs: true})]);
+ meas['*']['count'] = {aliases: new Set([vgField(fieldDef, {forAs: true})])};
} else {
if (isArgminDef(aggregate) || isArgmaxDef(aggregate)) {
const op = isArgminDef(aggregate) ? 'argmin' : 'argmax';
const argField = aggregate[op];
meas[argField] ??= {};
- meas[argField][op] = new Set([vgField({op, field: argField}, {forAs: true})]);
+ meas[argField][op] = {aliases: new Set([vgField({op, field: argField}, {forAs: true})])};
+ } else if (isExponentialDef(aggregate)) {
+ const op = 'exponential';
+ const aggregateParam = aggregate[op];
+ meas[field] ??= {};
+ meas[field][op] = {aliases: new Set([vgField(fieldDef, {forAs: true})]), aggregateParam: aggregateParam};
} else {
meas[field] ??= {};
- meas[field][aggregate] = new Set([vgField(fieldDef, {forAs: true})]);
+ meas[field][aggregate] = {aliases: new Set([vgField(fieldDef, {forAs: true})])};
}
// For scale channel with domain === 'unaggregated', add min/max so we can use their union as unaggregated domain
if (isScaleChannel(channel) && model.scaleDomain(channel) === 'unaggregated') {
meas[field] ??= {};
- meas[field]['min'] = new Set([vgField({field, aggregate: 'min'}, {forAs: true})]);
- meas[field]['max'] = new Set([vgField({field, aggregate: 'max'}, {forAs: true})]);
+ meas[field]['min'] = {aliases: new Set([vgField({field, aggregate: 'min'}, {forAs: true})])};
+ meas[field]['max'] = {aliases: new Set([vgField({field, aggregate: 'max'}, {forAs: true})])};
}
}
} else {
@@ -159,12 +171,23 @@ export class AggregateNode extends DataFlowNode {
for (const s of t.aggregate) {
const {op, field, as} = s;
if (op) {
+ const aliases = new Set([as ? as : vgField(s, {forAs: true})]);
if (op === 'count') {
meas['*'] ??= {};
- meas['*']['count'] = new Set([as ? as : vgField(s, {forAs: true})]);
+ meas['*']['count'] = {aliases};
} else {
- meas[field] ??= {};
- meas[field][op] = new Set([as ? as : vgField(s, {forAs: true})]);
+ if (isExponentialDef(op)) {
+ const opName = 'exponential';
+ const aggregateParam = op[opName];
+ meas[field] ??= {};
+ meas[field][opName] = {
+ aliases,
+ aggregateParam
+ };
+ } else {
+ meas[field] ??= {};
+ meas[field][op] = {aliases};
+ }
}
}
}
@@ -202,7 +225,7 @@ export class AggregateNode extends DataFlowNode {
for (const field of keys(this.measures)) {
for (const op of keys(this.measures[field])) {
- const m = this.measures[field][op];
+ const m = this.measures[field][op].aliases;
if (m.size === 0) {
out.add(`${op}_${field}`);
} else {
@@ -222,13 +245,15 @@ export class AggregateNode extends DataFlowNode {
const ops: AggregateOp[] = [];
const fields: string[] = [];
const as: string[] = [];
+ const aggregateParams: (number | null)[] = [];
for (const field of keys(this.measures)) {
for (const op of keys(this.measures[field])) {
- for (const alias of this.measures[field][op]) {
+ for (const alias of this.measures[field][op].aliases) {
as.push(alias);
ops.push(op);
fields.push(field === '*' ? null : replacePathInField(field));
+ aggregateParams.push(this.measures[field][op].aggregateParam || null);
}
}
}
@@ -241,6 +266,10 @@ export class AggregateNode extends DataFlowNode {
as
};
+ if (aggregateParams.some(param => typeof param === 'number')) {
+ result.aggregate_params = aggregateParams;
+ }
+
return result;
}
}
diff --git a/src/compositemark/boxplot.ts b/src/compositemark/boxplot.ts
index e047c2de09..66af9aaff6 100644
--- a/src/compositemark/boxplot.ts
+++ b/src/compositemark/boxplot.ts
@@ -7,7 +7,13 @@ import * as log from '../log';
import {isMarkDef, MarkDef, MarkInvalidMixins} from '../mark';
import {NormalizerParams} from '../normalize';
import {GenericUnitSpec, NormalizedLayerSpec, NormalizedUnitSpec} from '../spec';
-import {AggregatedFieldDef, CalculateTransform, JoinAggregateTransform, Transform} from '../transform';
+import {
+ AggregatedFieldDef,
+ CalculateTransform,
+ JoinAggregateTransform,
+ NonArgAggregateFieldOp,
+ Transform
+} from '../transform';
import {isEmpty, omit} from '../util';
import {CompositeMarkNormalizer} from './base';
import {
@@ -21,6 +27,7 @@ import {
partLayerMixins,
PartsMixins
} from './common';
+import {FieldName} from '../channeldef';
export const BOXPLOT = 'boxplot' as const;
export type BoxPlot = typeof BOXPLOT;
@@ -333,7 +340,9 @@ export function normalizeBoxPlot(
};
}
-function boxParamsQuartiles(continousAxisField: string): AggregatedFieldDef[] {
+function boxParamsQuartiles(
+ continousAxisField: string
+): {op: NonArgAggregateFieldOp; field: FieldName; as: FieldName}[] {
return [
{
op: 'q1',
diff --git a/src/encoding.ts b/src/encoding.ts
index 0dafbc975e..71f4934f11 100644
--- a/src/encoding.ts
+++ b/src/encoding.ts
@@ -1,6 +1,5 @@
-import {AggregateOp} from 'vega';
import {array, isArray} from 'vega-util';
-import {isArgmaxDef, isArgminDef} from './aggregate';
+import {isArgmaxDef, isArgminDef, isExponentialDef} from './aggregate';
import {isBinned, isBinning} from './bin';
import {
ANGLE,
@@ -91,7 +90,7 @@ import {Config} from './config';
import * as log from './log';
import {Mark} from './mark';
import {EncodingFacetMapping} from './spec/facet';
-import {AggregatedFieldDef, BinTransform, TimeUnitTransform} from './transform';
+import {AggregatedFieldDef, BinTransform, AggregateFieldOp, TimeUnitTransform} from './transform';
import {isContinuous, isDiscrete, QUANTITATIVE, TEMPORAL} from './type';
import {keys, some} from './util';
import {isSignalRef} from './vega.schema';
@@ -415,7 +414,7 @@ export function extractTransformsFromEncoding(oldEncoding: Encoding, config
};
if (aggOp) {
- let op: AggregateOp;
+ let op: AggregateFieldOp;
if (isArgmaxDef(aggOp)) {
op = 'argmax';
@@ -425,6 +424,9 @@ export function extractTransformsFromEncoding(oldEncoding: Encoding, config
op = 'argmin';
newField = vgField({op: 'argmin', field: aggOp.argmin}, {forAs: true});
newFieldDef.field = `${newField}.${field}`;
+ } else if (isExponentialDef(aggOp)) {
+ const exponentialValue = aggOp['exponential'];
+ op = {exponential: exponentialValue};
} else if (aggOp !== 'boxplot' && aggOp !== 'errorbar' && aggOp !== 'errorband') {
op = aggOp;
}
diff --git a/src/transform.ts b/src/transform.ts
index 33b10bb723..84b8131048 100644
--- a/src/transform.ts
+++ b/src/transform.ts
@@ -8,6 +8,7 @@ import {ParameterName} from './parameter';
import {normalizePredicate, Predicate} from './predicate';
import {SortField} from './sort';
import {TimeUnit, TimeUnitTransformParams} from './timeunit';
+import {ExponentialDef} from './aggregate';
export interface FilterTransform {
/**
@@ -98,13 +99,17 @@ export interface AggregateTransform {
groupby?: FieldName[];
}
+export type NonArgAggregateFieldOp = Exclude<AggregateOp, 'exponential'>;
+
+export type AggregateFieldOp = NonArgAggregateFieldOp | ExponentialDef;
+
export interface AggregatedFieldDef {
/**
* The aggregation operation to apply to the fields (e.g., `"sum"`, `"average"`, or `"count"`).
* See the [full list of supported aggregation operations](https://vega.github.io/vega-lite/docs/aggregate.html#ops)
* for more information.
*/
- op: AggregateOp;
+ op: AggregateFieldOp;
/**
* The data field for which to compute aggregate function. This is required for all aggregation operations except `"count"`.
diff --git a/test/channeldef.test.ts b/test/channeldef.test.ts
index c25574932c..8c93addffe 100644
--- a/test/channeldef.test.ts
+++ b/test/channeldef.test.ts
@@ -39,6 +39,12 @@ describe('fieldDef', () => {
);
});
+ it('should support exponential operations', () => {
+ expect(vgField({aggregate: {exponential: 0.23}, field: 'a'}, {expr: 'datum'})).toBe('datum["exponential_a"]');
+
+ expect(vgField({op: {exponential: 0.54}, field: 'a', as: 'b'})).toBe('exponential_a');
+ });
+
it('should support prefix and field names with space', () => {
expect(vgField({field: 'foo bar'}, {prefix: 'prefix'})).toBe('prefix_foo bar');
});
diff --git a/test/compile/data/aggregate.test.ts b/test/compile/data/aggregate.test.ts
index b0d882152f..6425e9993c 100644
--- a/test/compile/data/aggregate.test.ts
+++ b/test/compile/data/aggregate.test.ts
@@ -49,9 +49,9 @@ describe('compile/data/aggregate', () => {
const agg = AggregateNode.makeFromEncoding(null, model);
expect(agg.hash()).toBe(
- `Aggregate {"dimensions":"Set(\\"Origin\\")","measures":{"*":{"count":"Set(\\"${internalField(
+ `Aggregate {"dimensions":"Set(\\"Origin\\")","measures":{"*":{"count":{"aliases":"Set(\\"${internalField(
'count'
- )}\\")"},"Acceleration":{"sum":"Set(\\"sum_Acceleration\\")"}}}`
+ )}\\")"}},"Acceleration":{"sum":{"aliases":"Set(\\"sum_Acceleration\\")"}}}}`
);
});
});
@@ -268,6 +268,26 @@ describe('compile/data/aggregate', () => {
as: ['argmin_a', 'argmax_c']
});
});
+
+ it('should produce the correct summary component for exponential', () => {
+ const model = parseUnitModel({
+ mark: 'point',
+ encoding: {
+ x: {aggregate: {exponential: 0.25}, field: 'Displacement', type: 'quantitative'},
+ y: {aggregate: 'sum', field: 'Acceleration', type: 'quantitative'}
+ }
+ });
+
+ const agg = AggregateNode.makeFromEncoding(null, model);
+ expect(agg.assemble()).toEqual({
+ type: 'aggregate',
+ groupby: [],
+ ops: ['exponential', 'sum'],
+ fields: ['Displacement', 'Acceleration'],
+ as: ['exponential_Displacement', 'sum_Acceleration'],
+ aggregate_params: [0.25, null]
+ });
+ });
});
describe('makeFromTransform', () => {
@@ -309,6 +329,26 @@ describe('compile/data/aggregate', () => {
as: ['Displacement_mean', 'Displacement_max', 'Acceleration_sum']
});
});
+
+ it('should produce the correct summary component from transform array with exponential', () => {
+ const t: AggregateTransform = {
+ aggregate: [
+ {op: 'sum', field: 'Acceleration', as: 'Acceleration_sum'},
+ {op: {exponential: 0.3}, field: 'Displacement', as: 'Displacement_exponential'}
+ ],
+ groupby: ['Group']
+ };
+
+ const agg = AggregateNode.makeFromTransform(null, t);
+ expect(agg.assemble()).toEqual({
+ type: 'aggregate',
+ groupby: ['Group'],
+ ops: ['sum', 'exponential'],
+ fields: ['Acceleration', 'Displacement'],
+ as: ['Acceleration_sum', 'Displacement_exponential'],
+ aggregate_params: [null, 0.3]
+ });
+ });
});
describe('producedFields', () => {
@@ -336,17 +376,38 @@ describe('compile/data/aggregate', () => {
});
it('should merge AggregateNodes with same dimensions', () => {
const parent = new PlaceholderDataFlowNode(null);
- const agg1 = new AggregateNode(parent, new Set(['a', 'b']), {a: {mean: new Set(['a_mean'])}});
- const agg2 = new AggregateNode(parent, new Set(['a', 'b']), {b: {mean: new Set(['b_mean'])}});
+ const agg1 = new AggregateNode(parent, new Set(['a', 'b']), {a: {mean: {aliases: new Set(['a_mean'])}}});
+ const agg2 = new AggregateNode(parent, new Set(['a', 'b']), {b: {mean: {aliases: new Set(['b_mean'])}}});
expect(agg1.merge(agg2)).toBe(true);
expect(agg1.producedFields()).toEqual(new Set(['a_mean', 'b_mean']));
});
+ it('should merge AggregateNodes without losing aggregateParam', () => {
+ const parent = new PlaceholderDataFlowNode(null);
+ const agg1 = new AggregateNode(parent, new Set(['a', 'b']), {
+ a: {sum: {aliases: new Set(['a_sum'])}}
+ });
+ const agg2 = new AggregateNode(parent, new Set(['a', 'b']), {
+ b: {exponential: {aliases: new Set(['b_exponential']), aggregateParam: 0.5}}
+ });
+
+ expect(agg1.merge(agg2)).toBe(true);
+ expect(agg1.assemble()).toEqual({
+ ops: ['sum', 'exponential'],
+ type: 'aggregate',
+ as: ['a_sum', 'b_exponential'],
+ fields: ['a', 'b'],
+ groupby: ['a', 'b'],
+ aggregate_params: [null, 0.5]
+ });
+ });
});
describe('assemble()', () => {
it('should escape nested accesses', () => {
- const agg = new AggregateNode(null, new Set(['foo.bar']), {'foo.baz': {mean: new Set(['foo_baz_mean'])}});
+ const agg = new AggregateNode(null, new Set(['foo.bar']), {
+ 'foo.baz': {mean: {aliases: new Set(['foo_baz_mean'])}}
+ });
expect(agg.assemble()).toEqual({
as: ['foo_baz_mean'],
fields: ['foo\\.baz'],
diff --git a/test/compile/data/assemble.test.ts b/test/compile/data/assemble.test.ts
index 95efd81ea0..8615ea0ff2 100644
--- a/test/compile/data/assemble.test.ts
+++ b/test/compile/data/assemble.test.ts
@@ -35,7 +35,7 @@ describe('compile/data/assemble', () => {
const outputNodeRefCounts = {};
const raw = new OutputNode(null, 'rawOut', DataSourceType.Raw, outputNodeRefCounts);
raw.parent = src;
- const agg = new AggregateNode(null, new Set(['a']), {b: {count: new Set(['count_*'])}});
+ const agg = new AggregateNode(null, new Set(['a']), {b: {count: {aliases: new Set(['count_*'])}}});
agg.parent = raw;
const main = new OutputNode(null, 'mainOut', DataSourceType.Main, outputNodeRefCounts);
main.parent = agg;
diff --git a/test/compositemark/errorbar.test.ts b/test/compositemark/errorbar.test.ts
index f9d14a294a..ef8f91257b 100644
--- a/test/compositemark/errorbar.test.ts
+++ b/test/compositemark/errorbar.test.ts
@@ -1,4 +1,3 @@
-import {AggregateOp} from 'vega';
import {FieldName} from '../../src/channeldef';
import {ErrorBarCenter, ErrorBarExtent} from '../../src/compositemark/errorbar';
import {defaultConfig} from '../../src/config';
@@ -7,7 +6,7 @@ import {isMarkDef} from '../../src/mark';
import {normalize} from '../../src/normalize';
import {isLayerSpec, isUnitSpec} from '../../src/spec';
import {TopLevelUnitSpec} from '../../src/spec/unit';
-import {isAggregate, isCalculate, Transform} from '../../src/transform';
+import {AggregateFieldOp, isAggregate, isCalculate, Transform} from '../../src/transform';
import {some} from '../../src/util';
import {assertIsLayerSpec, assertIsUnitSpec} from '../util';
@@ -597,7 +596,7 @@ describe('normalizeErrorBar for all possible extents and centers with raw data i
}
});
-function isPartOfExtent(extent: ErrorBarExtent, op: AggregateOp) {
+function isPartOfExtent(extent: ErrorBarExtent, op: AggregateFieldOp) {
if (extent === 'ci') {
return op === 'ci0' || op === 'ci1';
} else if (extent === 'iqr') {
diff --git a/test/encoding.test.ts b/test/encoding.test.ts
index 847bda7c6d..9d4d66804b 100644
--- a/test/encoding.test.ts
+++ b/test/encoding.test.ts
@@ -320,6 +320,38 @@ describe('encoding', () => {
}
});
});
+ it('should extract aggregates with exponential operations from encoding', () => {
+ const output = extractTransformsFromEncoding(
+ initEncoding(
+ {
+ x: {field: 'a', type: 'quantitative'},
+ y: {
+ aggregate: {exponential: 0.3},
+ field: 'b',
+ type: 'quantitative'
+ }
+ },
+ 'line',
+ false,
+ defaultConfig
+ ),
+ defaultConfig
+ );
+ expect(output).toEqual({
+ bins: [],
+ timeUnits: [],
+ aggregate: [{op: {exponential: 0.3}, field: 'b', as: 'exponential_b'}],
+ groupby: ['a'],
+ encoding: {
+ x: {field: 'a', type: 'quantitative'},
+ y: {
+ field: 'exponential_b',
+ type: 'quantitative',
+ title: 'Exponential of b'
+ }
+ }
+ });
+ });
it('should extract binning from encoding', () => {
const output = extractTransformsFromEncoding(
initEncoding(