From befc33c4eb315e7d7d00567b8b5d875f0cfa71f5 Mon Sep 17 00:00:00 2001 From: Jason Etcovitch Date: Wed, 3 Feb 2021 14:26:33 -0500 Subject: [PATCH] feat: create trie programmatically in options --- src/builtin/tags/assign.ts | 2 +- src/builtin/tags/capture.ts | 2 +- src/builtin/tags/case.ts | 5 +- src/builtin/tags/cycle.ts | 2 +- src/builtin/tags/decrement.ts | 2 +- src/builtin/tags/for.ts | 2 +- src/builtin/tags/if.ts | 3 +- src/builtin/tags/include.ts | 2 +- src/builtin/tags/increment.ts | 2 +- src/builtin/tags/layout.ts | 2 +- src/builtin/tags/render.ts | 2 +- src/builtin/tags/tablerow.ts | 2 +- src/builtin/tags/unless.ts | 5 +- src/liquid-options.ts | 9 +- src/liquid.ts | 2 +- src/parser/match-operator.ts | 13 +-- src/parser/tokenizer.ts | 4 +- src/render/expression.ts | 5 +- src/template/tag/hash.ts | 2 +- src/template/value.ts | 2 +- src/tokens/tag-token.ts | 2 +- src/util/operator-trie.ts | 27 +++++ test/integration/liquid/operators-option.ts | 6 +- test/unit/parser/match-operator.ts | 25 +++-- test/unit/parser/tokenizer.ts | 116 ++++++++++---------- test/unit/render/expression.ts | 84 +++++++------- test/unit/template/output.ts | 6 +- test/unit/template/value.ts | 6 +- 28 files changed, 196 insertions(+), 146 deletions(-) create mode 100644 src/util/operator-trie.ts diff --git a/src/builtin/tags/assign.ts b/src/builtin/tags/assign.ts index d952215c62..683a561d17 100644 --- a/src/builtin/tags/assign.ts +++ b/src/builtin/tags/assign.ts @@ -2,7 +2,7 @@ import { Tokenizer, assert, TagImplOptions, TagToken, Context } from '../../type export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) this.key = tokenizer.readIdentifier().content tokenizer.skipBlank() assert(tokenizer.peek() === '=', () => `illegal token ${token.getText()}`) diff --git a/src/builtin/tags/capture.ts b/src/builtin/tags/capture.ts index 3b0aa1fd39..1adc4f062e 100644 --- a/src/builtin/tags/capture.ts +++ b/src/builtin/tags/capture.ts @@ -3,7 +3,7 @@ import { evalQuotedToken } from '../../render/expression' export default { parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(tagToken.args) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) this.variable = readVariableName(tokenizer) assert(this.variable, () => `${tagToken.args} not valid identifier`) diff --git a/src/builtin/tags/case.ts b/src/builtin/tags/case.ts index 17c086f54a..807f14c61b 100644 --- a/src/builtin/tags/case.ts +++ b/src/builtin/tags/case.ts @@ -26,10 +26,11 @@ export default { render: function * (ctx: Context, emitter: Emitter) { const r = this.liquid.renderer - const cond = yield new Expression(this.cond, this.liquid.options.operators).value(ctx) + const { operators, operatorsTrie } = this.liquid.options + const cond = yield new Expression(this.cond, operators, operatorsTrie).value(ctx) for (let i = 0; i < this.cases.length; i++) { const branch = this.cases[i] - const val = yield new Expression(branch.val, this.liquid.options.operators).value(ctx) + const val = yield new Expression(branch.val, operators, operatorsTrie).value(ctx) if (val === cond) { yield r.renderTemplates(branch.templates, ctx, emitter) return diff --git a/src/builtin/tags/cycle.ts b/src/builtin/tags/cycle.ts index 2d71be0484..86153f6b35 100644 --- a/src/builtin/tags/cycle.ts +++ b/src/builtin/tags/cycle.ts @@ -4,7 +4,7 @@ import { Tokenizer } from '../../parser/tokenizer' 
export default { parse: function (tagToken: TagToken) { - const tokenizer = new Tokenizer(tagToken.args) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) const group = tokenizer.readValue() tokenizer.skipBlank() diff --git a/src/builtin/tags/decrement.ts b/src/builtin/tags/decrement.ts index 0db11fe7f2..6c05506e57 100644 --- a/src/builtin/tags/decrement.ts +++ b/src/builtin/tags/decrement.ts @@ -3,7 +3,7 @@ import { isNumber, stringify } from '../../util/underscore' export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) this.variable = tokenizer.readIdentifier().content }, render: function (context: Context, emitter: Emitter) { diff --git a/src/builtin/tags/for.ts b/src/builtin/tags/for.ts index 94f23fd830..e477b579a1 100644 --- a/src/builtin/tags/for.ts +++ b/src/builtin/tags/for.ts @@ -6,7 +6,7 @@ import { Hash } from '../../template/tag/hash' export default { type: 'block', parse: function (token: TagToken, remainTokens: TopLevelToken[]) { - const toknenizer = new Tokenizer(token.args) + const toknenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) const variable = toknenizer.readIdentifier() const inStr = toknenizer.readIdentifier() diff --git a/src/builtin/tags/if.ts b/src/builtin/tags/if.ts index 493ac836a9..f9fc745183 100644 --- a/src/builtin/tags/if.ts +++ b/src/builtin/tags/if.ts @@ -29,9 +29,10 @@ export default { render: function * (ctx: Context, emitter: Emitter) { const r = this.liquid.renderer + const { operators, operatorsTrie } = this.liquid.options for (const branch of this.branches) { - const cond = yield new Expression(branch.cond, this.liquid.options.operators, ctx.opts.lenientIf).value(ctx) + const cond = yield new Expression(branch.cond, operators, operatorsTrie, ctx.opts.lenientIf).value(ctx) if (isTruthy(cond, ctx)) { yield r.renderTemplates(branch.templates, ctx, emitter) return diff --git a/src/builtin/tags/include.ts b/src/builtin/tags/include.ts index 504f4eb83c..a036e5d1c1 100644 --- a/src/builtin/tags/include.ts +++ b/src/builtin/tags/include.ts @@ -4,7 +4,7 @@ import BlockMode from '../../context/block-mode' export default { parse: function (token: TagToken) { const args = token.args - const tokenizer = new Tokenizer(args) + const tokenizer = new Tokenizer(args, this.liquid.options.operatorsTrie) this.file = this.liquid.options.dynamicPartials ? 
tokenizer.readValue() : tokenizer.readFileName() diff --git a/src/builtin/tags/increment.ts b/src/builtin/tags/increment.ts index 8107acae62..1fdc61605c 100644 --- a/src/builtin/tags/increment.ts +++ b/src/builtin/tags/increment.ts @@ -3,7 +3,7 @@ import { Tokenizer, Emitter, TagToken, Context, TagImplOptions } from '../../typ export default { parse: function (token: TagToken) { - const tokenizer = new Tokenizer(token.args) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) this.variable = tokenizer.readIdentifier().content }, render: function (context: Context, emitter: Emitter) { diff --git a/src/builtin/tags/layout.ts b/src/builtin/tags/layout.ts index f822c655d7..f69c842b23 100644 --- a/src/builtin/tags/layout.ts +++ b/src/builtin/tags/layout.ts @@ -3,7 +3,7 @@ import BlockMode from '../../context/block-mode' export default { parse: function (token: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(token.args) + const tokenizer = new Tokenizer(token.args, this.liquid.options.operatorsTrie) const file = this.liquid.options.dynamicPartials ? tokenizer.readValue() : tokenizer.readFileName() assert(file, () => `illegal argument "${token.args}"`) diff --git a/src/builtin/tags/render.ts b/src/builtin/tags/render.ts index 48d0c7f50e..79c1fce93c 100644 --- a/src/builtin/tags/render.ts +++ b/src/builtin/tags/render.ts @@ -6,7 +6,7 @@ import { evalQuotedToken, TypeGuards, Tokenizer, evalToken, Hash, Emitter, TagTo export default { parse: function (token: TagToken) { const args = token.args - const tokenizer = new Tokenizer(args) + const tokenizer = new Tokenizer(args, this.liquid.options.operatorsTrie) this.file = this.liquid.options.dynamicPartials ? tokenizer.readValue() : tokenizer.readFileName() diff --git a/src/builtin/tags/tablerow.ts b/src/builtin/tags/tablerow.ts index 8dc0dbd42b..995c1cb900 100644 --- a/src/builtin/tags/tablerow.ts +++ b/src/builtin/tags/tablerow.ts @@ -5,7 +5,7 @@ import { Tokenizer } from '../../parser/tokenizer' export default { parse: function (tagToken: TagToken, remainTokens: TopLevelToken[]) { - const tokenizer = new Tokenizer(tagToken.args) + const tokenizer = new Tokenizer(tagToken.args, this.liquid.options.operatorsTrie) this.variable = tokenizer.readIdentifier() tokenizer.skipBlank() diff --git a/src/builtin/tags/unless.ts b/src/builtin/tags/unless.ts index 7e830562a7..171a824609 100644 --- a/src/builtin/tags/unless.ts +++ b/src/builtin/tags/unless.ts @@ -29,7 +29,8 @@ export default { render: function * (ctx: Context, emitter: Emitter) { const r = this.liquid.renderer - const cond = yield new Expression(this.cond, this.liquid.options.operators, ctx.opts.lenientIf).value(ctx) + const { operators, operatorsTrie } = this.liquid.options + const cond = yield new Expression(this.cond, operators, operatorsTrie, ctx.opts.lenientIf).value(ctx) if (isFalsy(cond, ctx)) { yield r.renderTemplates(this.templates, ctx, emitter) @@ -37,7 +38,7 @@ export default { } for (const branch of this.branches) { - const cond = yield new Expression(branch.cond, this.liquid.options.operators, ctx.opts.lenientIf).value(ctx) + const cond = yield new Expression(branch.cond, operators, operatorsTrie, ctx.opts.lenientIf).value(ctx) if (isTruthy(cond, ctx)) { yield r.renderTemplates(branch.templates, ctx, emitter) return diff --git a/src/liquid-options.ts b/src/liquid-options.ts index 4224482f10..01dbdadb6c 100644 --- a/src/liquid-options.ts +++ b/src/liquid-options.ts @@ -4,6 +4,7 @@ import { Cache } from './cache/cache' import { LRU 
} from './cache/lru' import { FS } from './fs/fs' import { defaultOperators, Operators } from './render/operator' +import { createTrie, Trie } from './util/operator-trie' export interface LiquidOptions { /** A directory or an array of directories from where to resolve layout and include templates, and the filename passed to `.renderFile()`. If it's an array, the files are looked up in the order they occur in the array. Defaults to `["."]` */ @@ -55,6 +56,7 @@ export interface LiquidOptions { interface NormalizedOptions extends LiquidOptions { root?: string[]; cache?: Cache; + operatorsTrie?: Trie; } export interface NormalizedFullOptions extends NormalizedOptions { @@ -79,6 +81,7 @@ export interface NormalizedFullOptions extends NormalizedOptions { globals: object; keepOutputType: boolean; operators: Operators; + operatorsTrie: Trie; } export const defaultOptions: NormalizedFullOptions = { @@ -102,7 +105,8 @@ export const defaultOptions: NormalizedFullOptions = { lenientIf: false, globals: {}, keepOutputType: false, - operators: defaultOperators + operators: defaultOperators, + operatorsTrie: createTrie(defaultOperators) } export function normalize (options?: LiquidOptions): NormalizedOptions { @@ -117,6 +121,9 @@ export function normalize (options?: LiquidOptions): NormalizedOptions { else cache = options.cache ? new LRU(1024) : undefined options.cache = cache } + if (options.hasOwnProperty('operators')) { + (options as NormalizedOptions).operatorsTrie = createTrie(options.operators!) + } return options as NormalizedOptions } diff --git a/src/liquid.ts b/src/liquid.ts index 84804d24c9..f34308eaed 100644 --- a/src/liquid.ts +++ b/src/liquid.ts @@ -39,7 +39,7 @@ export class Liquid { forOwn(builtinFilters, (handler: FilterImplOptions, name: string) => this.registerFilter(snakeCase(name), handler)) } public parse (html: string, filepath?: string): Template[] { - const tokenizer = new Tokenizer(html, filepath) + const tokenizer = new Tokenizer(html, this.options.operatorsTrie, filepath) const tokens = tokenizer.readTopLevelTokens(this.options) return this.parser.parse(tokens) } diff --git a/src/parser/match-operator.ts b/src/parser/match-operator.ts index 206c7a5f12..d0794bdd71 100644 --- a/src/parser/match-operator.ts +++ b/src/parser/match-operator.ts @@ -1,16 +1,7 @@ import { IDENTIFIER } from '../util/character' +import { Trie } from '../util/operator-trie' -const trie = { - a: { n: { d: { end: true, needBoundary: true } } }, - o: { r: { end: true, needBoundary: true } }, - c: { o: { n: { t: { a: { i: { n: { s: { end: true, needBoundary: true } } } } } } } }, - '=': { '=': { end: true } }, - '!': { '=': { end: true } }, - '>': { end: true, '=': { end: true } }, - '<': { end: true, '=': { end: true } } -} - -export function matchOperator (str: string, begin: number, end = str.length) { +export function matchOperator (str: string, begin: number, trie: Trie, end = str.length) { let node = trie let i = begin let info diff --git a/src/parser/tokenizer.ts b/src/parser/tokenizer.ts index e53d409218..927a8ccc6a 100644 --- a/src/parser/tokenizer.ts +++ b/src/parser/tokenizer.ts @@ -22,6 +22,7 @@ import { TokenizationError } from '../util/error' import { NormalizedFullOptions, defaultOptions } from '../liquid-options' import { TYPES, QUOTE, BLANK, IDENTIFIER } from '../util/character' import { matchOperator } from './match-operator' +import { Trie } from '../util/operator-trie' export class Tokenizer { p = 0 @@ -30,6 +31,7 @@ export class Tokenizer { constructor ( private input: string, + private 
trie: Trie, private file: string = '' ) { this.N = input.length @@ -54,7 +56,7 @@ export class Tokenizer { } readOperator (): OperatorToken | undefined { this.skipBlank() - const end = matchOperator(this.input, this.p, this.p + 8) + const end = matchOperator(this.input, this.p, this.trie, this.p + 8) if (end === -1) return return new OperatorToken(this.input, this.p, (this.p = end), this.file) } diff --git a/src/render/expression.ts b/src/render/expression.ts index 05395985e7..53146dcf96 100644 --- a/src/render/expression.ts +++ b/src/render/expression.ts @@ -13,6 +13,7 @@ import { range, toValue } from '../util/underscore' import { Tokenizer } from '../parser/tokenizer' import { Operators } from '../render/operator' import { UndefinedVariableError, InternalUndefinedVariableError } from '../util/error' +import { Trie } from '../util/operator-trie' export class Expression { private operands: any[] = [] @@ -20,8 +21,8 @@ export class Expression { private lenient: boolean private operators: Operators - public constructor (str: string, operators: Operators, lenient = false) { - const tokenizer = new Tokenizer(str) + public constructor (str: string, operators: Operators, operatorsTrie: Trie, lenient = false) { + const tokenizer = new Tokenizer(str, operatorsTrie) this.postfix = [...toPostfix(tokenizer.readExpression())] this.lenient = lenient this.operators = operators diff --git a/src/template/tag/hash.ts b/src/template/tag/hash.ts index ae15b99873..37a1003e2d 100644 --- a/src/template/tag/hash.ts +++ b/src/template/tag/hash.ts @@ -13,7 +13,7 @@ import { Tokenizer } from '../../parser/tokenizer' export class Hash { hash: { [key: string]: any } = {} constructor (markup: string) { - const tokenizer = new Tokenizer(markup) + const tokenizer = new Tokenizer(markup, {}) for (const hash of tokenizer.readHashes()) { this.hash[hash.name.content] = hash.value } diff --git a/src/template/value.ts b/src/template/value.ts index 74853673d6..cd64a7e17a 100644 --- a/src/template/value.ts +++ b/src/template/value.ts @@ -14,7 +14,7 @@ export class Value { * @param str the value to be valuated, eg.: "foobar" | truncate: 3 */ public constructor (str: string, private readonly filterMap: FilterMap, liquid: Liquid) { - const tokenizer = new Tokenizer(str) + const tokenizer = new Tokenizer(str, liquid.options.operatorsTrie) this.initial = tokenizer.readValue() this.filters = tokenizer.readFilters().map(({ name, args }) => new Filter(name, this.filterMap.get(name), args, liquid)) } diff --git a/src/tokens/tag-token.ts b/src/tokens/tag-token.ts index 25503c8fae..6163a9e1be 100644 --- a/src/tokens/tag-token.ts +++ b/src/tokens/tag-token.ts @@ -18,7 +18,7 @@ export class TagToken extends DelimitedToken { const value = input.slice(begin + tagDelimiterLeft.length, end - tagDelimiterRight.length) super(TokenKind.Tag, value, input, begin, end, trimTagLeft, trimTagRight, file) - const tokenizer = new Tokenizer(this.content) + const tokenizer = new Tokenizer(this.content, options.operatorsTrie) this.name = tokenizer.readIdentifier().getText() if (!this.name) throw new TokenizationError(`illegal tag syntax`, this) diff --git a/src/util/operator-trie.ts b/src/util/operator-trie.ts new file mode 100644 index 0000000000..b1f083dbf4 --- /dev/null +++ b/src/util/operator-trie.ts @@ -0,0 +1,27 @@ +import { Operators } from '../render/operator' + +export interface Trie { + [key: string]: any; +} + +export function createTrie (operators: Operators): Trie { + const trie: Trie = {} + for (const [name, handler] of 
Object.entries(operators)) { + let node = trie + + for (let i = 0; i < name.length; i++) { + const c = name[i] + node[c] = node[c] || {} + + if (i === name.length - 1 && /\w/.test(c)) { // needBoundary only for word operators (and/or/contains), not for ==, >, <, etc. + node[c].needBoundary = true + } + + node = node[c] + } + + node.handler = handler + node.end = true + } + return trie +} diff --git a/test/integration/liquid/operators-option.ts b/test/integration/liquid/operators-option.ts index 7d4578f29d..90da4454e6 100644 --- a/test/integration/liquid/operators-option.ts +++ b/test/integration/liquid/operators-option.ts @@ -19,7 +19,9 @@ describe('LiquidOptions#operators', function () { }) it('should evaluate a custom operator', async function () { - const result = await engine.parseAndRender('{% if "foo" isFooBar "bar" %}True{% endif %}') - expect(result).to.equal('True') + const first = await engine.parseAndRender('{% if "foo" isFooBar "bar" %}True{% else %}False{% endif %}') + expect(first).to.equal('True') + const second = await engine.parseAndRender('{% if "foo" isFooBar "foo" %}True{% else %}False{% endif %}') + expect(second).to.equal('False') }) }) diff --git a/test/unit/parser/match-operator.ts b/test/unit/parser/match-operator.ts index 3f5c5058c7..8c55f43e80 100644 --- a/test/unit/parser/match-operator.ts +++ b/test/unit/parser/match-operator.ts @@ -1,26 +1,29 @@ import { expect } from 'chai' import { matchOperator } from '../../../src/parser/match-operator' +import { defaultOperators } from '../../../src/types' +import { createTrie } from '../../../src/util/operator-trie' describe('parser/matchOperator()', function () { + const trie = createTrie(defaultOperators) it('should match contains', () => { - expect(matchOperator('contains', 0)).to.equal(8) + expect(matchOperator('contains', 0, trie)).to.equal(8) }) it('should match comparision', () => { - expect(matchOperator('>', 0)).to.equal(1) - expect(matchOperator('>=', 0)).to.equal(2) - expect(matchOperator('<', 0)).to.equal(1) - expect(matchOperator('<=', 0)).to.equal(2) + expect(matchOperator('>', 0, trie)).to.equal(1) + expect(matchOperator('>=', 0, trie)).to.equal(2) + expect(matchOperator('<', 0, trie)).to.equal(1) + expect(matchOperator('<=', 0, trie)).to.equal(2) }) it('should match binary logic', () => { - expect(matchOperator('and', 0)).to.equal(3) - expect(matchOperator('or', 0)).to.equal(2) + expect(matchOperator('and', 0, trie)).to.equal(3) + expect(matchOperator('or', 0, trie)).to.equal(2) }) it('should not match if word not terminate', () => { - expect(matchOperator('true1', 0)).to.equal(-1) - expect(matchOperator('containsa', 0)).to.equal(-1) + expect(matchOperator('true1', 0, trie)).to.equal(-1) + expect(matchOperator('containsa', 0, trie)).to.equal(-1) }) it('should match if word boundary found', () => { - expect(matchOperator('>=1', 0)).to.equal(2) - expect(matchOperator('contains b', 0)).to.equal(8) + expect(matchOperator('>=1', 0, trie)).to.equal(2) + expect(matchOperator('contains b', 0, trie)).to.equal(8) }) }) diff --git a/test/unit/parser/tokenizer.ts b/test/unit/parser/tokenizer.ts index 4bd7096688..5b44bdb6e7 100644 --- a/test/unit/parser/tokenizer.ts +++ b/test/unit/parser/tokenizer.ts @@ -9,59 +9,63 @@ import { TagToken } from '../../../src/tokens/tag-token' import { QuotedToken } from '../../../src/tokens/quoted-token' import { OutputToken } from '../../../src/tokens/output-token' import { HTMLToken } from '../../../src/tokens/html-token' +import { createTrie } from '../../../src/util/operator-trie' +import { defaultOperators } from '../../../src/types' describe('Tokenizer', function
() { + const trie = createTrie(defaultOperators) + it('should read quoted', () => { - expect(new Tokenizer('"foo" ff').readQuoted()!.getText()).to.equal('"foo"') - expect(new Tokenizer(' "foo"ff').readQuoted()!.getText()).to.equal('"foo"') + expect(new Tokenizer('"foo" ff', trie).readQuoted()!.getText()).to.equal('"foo"') + expect(new Tokenizer(' "foo"ff', trie).readQuoted()!.getText()).to.equal('"foo"') }) it('should read value', () => { - expect(new Tokenizer('a[ b][ "c d" ]').readValueOrThrow().getText()).to.equal('a[ b][ "c d" ]') - expect(new Tokenizer('a.b[c[d.e]]').readValueOrThrow().getText()).to.equal('a.b[c[d.e]]') + expect(new Tokenizer('a[ b][ "c d" ]', trie).readValueOrThrow().getText()).to.equal('a[ b][ "c d" ]') + expect(new Tokenizer('a.b[c[d.e]]', trie).readValueOrThrow().getText()).to.equal('a.b[c[d.e]]') }) it('should read identifier', () => { - expect(new Tokenizer('foo bar').readIdentifier()).to.haveOwnProperty('content', 'foo') - expect(new Tokenizer('foo bar').readWord()).to.haveOwnProperty('content', 'foo') + expect(new Tokenizer('foo bar', trie).readIdentifier()).to.haveOwnProperty('content', 'foo') + expect(new Tokenizer('foo bar', trie).readWord()).to.haveOwnProperty('content', 'foo') }) it('should read number value', () => { - const token: NumberToken = new Tokenizer('2.33.2').readValueOrThrow() as any + const token: NumberToken = new Tokenizer('2.33.2', trie).readValueOrThrow() as any expect(token).to.be.instanceOf(NumberToken) expect(token.whole.getText()).to.equal('2') expect(token.decimal!.getText()).to.equal('33') expect(token.getText()).to.equal('2.33') }) it('should read quoted value', () => { - const value = new Tokenizer('"foo"a').readValue() + const value = new Tokenizer('"foo"a', trie).readValue() expect(value).to.be.instanceOf(QuotedToken) expect(value!.getText()).to.equal('"foo"') }) it('should read property access value', () => { - expect(new Tokenizer('a[b]["c d"]').readValueOrThrow().getText()).to.equal('a[b]["c d"]') + expect(new Tokenizer('a[b]["c d"]', trie).readValueOrThrow().getText()).to.equal('a[b]["c d"]') }) it('should read quoted property access value', () => { - const value = new Tokenizer('["a prop"]').readValue() + const value = new Tokenizer('["a prop"]', trie).readValue() expect(value).to.be.instanceOf(PropertyAccessToken) expect((value as PropertyAccessToken).variable.getText()).to.equal('"a prop"') }) it('should throw for broken quoted property access', () => { - const tokenizer = new Tokenizer('[5]') + const tokenizer = new Tokenizer('[5]', trie) expect(() => tokenizer.readValueOrThrow()).to.throw() }) it('should throw for incomplete quoted property access', () => { - const tokenizer = new Tokenizer('["a prop"') + const tokenizer = new Tokenizer('["a prop"', trie) expect(() => tokenizer.readValueOrThrow()).to.throw() }) it('should read hash', () => { - const hash1 = new Tokenizer('foo: 3').readHash() + const hash1 = new Tokenizer('foo: 3', trie).readHash() expect(hash1!.name.content).to.equal('foo') expect(hash1!.value!.getText()).to.equal('3') - const hash2 = new Tokenizer(', foo: a[ "bar"]').readHash() + const hash2 = new Tokenizer(', foo: a[ "bar"]', trie).readHash() expect(hash2!.name.content).to.equal('foo') expect(hash2!.value!.getText()).to.equal('a[ "bar"]') }) it('should read multiple hashs', () => { - const hashes = new Tokenizer(', limit: 3 reverse offset:off').readHashes() + const hashes = new Tokenizer(', limit: 3 reverse offset:off', trie).readHashes() expect(hashes).to.have.lengthOf(3) const [limit, reverse, 
offset] = hashes expect(limit.name.content).to.equal('limit') @@ -74,7 +78,7 @@ describe('Tokenizer', function () { expect(offset.value!.getText()).to.equal('off') }) it('should read hash value with property access', () => { - const hashes = new Tokenizer('cols: 2, rows: data["rows"]').readHashes() + const hashes = new Tokenizer('cols: 2, rows: data["rows"]', trie).readHashes() expect(hashes).to.have.lengthOf(2) const [cols, rols] = hashes @@ -87,7 +91,7 @@ describe('Tokenizer', function () { describe('#readTopLevelTokens()', () => { it('should read HTML token', function () { const html = '
<html><body><p>Lorem Ipsum</p></body></html>
' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) @@ -96,7 +100,7 @@ describe('Tokenizer', function () { }) it('should read tag token', function () { const html = '
<p>{% for p in a[1]%}</p>
' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -107,7 +111,7 @@ describe('Tokenizer', function () { }) it('should allow unclosed tag inside {% raw %}', function () { const html = '{%raw%} {%if%} {%else {%endraw%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -116,7 +120,7 @@ describe('Tokenizer', function () { }) it('should allow unclosed endraw tag inside {% raw %}', function () { const html = '{%raw%} {%endraw {%raw%} {%endraw%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -125,12 +129,12 @@ describe('Tokenizer', function () { }) it('should throw when {% raw %} not closed', function () { const html = '{%raw%} {%endraw {%raw%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) expect(() => tokenizer.readTopLevelTokens()).to.throw('raw "{%raw%} {%end..." not closed, line:1, col:8') }) it('should read output token', function () { const html = '
<p>{{foo | date: "%Y-%m-%d"}}</p>
' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(3) @@ -140,7 +144,7 @@ describe('Tokenizer', function () { }) it('should handle consecutive value and tags', function () { const html = '{{foo}}{{bar}}{%foo%}{%bar%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(4) @@ -162,7 +166,7 @@ describe('Tokenizer', function () { }) it('should keep white spaces and newlines', function () { const html = '{%foo%}\n{%bar %} \n {%alice%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(5) expect(tokens[1]).instanceOf(HTMLToken) @@ -172,7 +176,7 @@ describe('Tokenizer', function () { }) it('should handle multiple lines tag', function () { const html = '{%foo\na:a\nb:1.23\n%}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) expect(tokens[0]).instanceOf(TagToken) @@ -181,7 +185,7 @@ describe('Tokenizer', function () { }) it('should handle multiple lines value', function () { const html = '{{foo\n|date:\n"%Y-%m-%d"\n}}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) expect(tokens[0]).instanceOf(OutputToken) @@ -189,7 +193,7 @@ describe('Tokenizer', function () { }) it('should handle complex object property access', function () { const html = '{{ obj["my:property with anything"] }}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const tokens = tokenizer.readTopLevelTokens() expect(tokens.length).to.equal(1) const output = tokens[0] as OutputToken @@ -198,18 +202,18 @@ describe('Tokenizer', function () { }) it('should throw if tag not closed', function () { const html = '{% assign foo = bar {{foo}}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) expect(() => tokenizer.readTopLevelTokens()).to.throw(/tag "{% assign foo..." not closed/) }) it('should throw if output not closed', function () { - const tokenizer = new Tokenizer('{{name}') + const tokenizer = new Tokenizer('{{name}', trie) expect(() => tokenizer.readTopLevelTokens()).to.throw(/output "{{name}" not closed/) }) }) describe('#readTagToken()', () => { it('should skip quoted delimiters', function () { const html = '{% assign a = "%} {% }} {{" %}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const token = tokenizer.readTagToken() expect(token).instanceOf(TagToken) @@ -220,7 +224,7 @@ describe('Tokenizer', function () { describe('#readOutputToken()', () => { it('should skip quoted delimiters', function () { const html = '{{ "%} {%" | append: "}} {{" }}' - const tokenizer = new Tokenizer(html) + const tokenizer = new Tokenizer(html, trie) const token = tokenizer.readOutputToken() console.log(token) @@ -230,7 +234,7 @@ describe('Tokenizer', function () { }) describe('#readRange()', () => { it('should read `(1..3)`', () => { - const range = new Tokenizer('(1..3)').readRange() + const range = new Tokenizer('(1..3)', trie).readRange() expect(range).to.be.instanceOf(RangeToken) expect(range!.getText()).to.deep.equal('(1..3)') const { lhs, rhs } = range! 
@@ -240,23 +244,23 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.equal('3') }) it('should throw for `(..3)`', () => { - expect(() => new Tokenizer('(..3)').readRange()).to.throw('unexpected token "..3)", value expected') + expect(() => new Tokenizer('(..3)', trie).readRange()).to.throw('unexpected token "..3)", value expected') }) it('should read `(a.b..c["..d"])`', () => { - const range = new Tokenizer('(a.b..c["..d"])').readRange() + const range = new Tokenizer('(a.b..c["..d"])', trie).readRange() expect(range).to.be.instanceOf(RangeToken) expect(range!.getText()).to.deep.equal('(a.b..c["..d"])') }) }) describe('#readFilter()', () => { it('should read a simple filter', function () { - const tokenizer = new Tokenizer('| plus') + const tokenizer = new Tokenizer('| plus', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token).to.have.property('args').to.deep.equal([]) }) it('should read a filter with argument', function () { - const tokenizer = new Tokenizer(' | plus: 1') + const tokenizer = new Tokenizer(' | plus: 1', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -266,18 +270,18 @@ describe('Tokenizer', function () { expect(one.getText()).to.equal('1') }) it('should read a filter with colon but no argument', function () { - const tokenizer = new Tokenizer('| plus:') + const tokenizer = new Tokenizer('| plus:', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token).to.have.property('args').to.deep.equal([]) }) it('should read null if name not found', function () { - const tokenizer = new Tokenizer('|') + const tokenizer = new Tokenizer('|', trie) const token = tokenizer.readFilter() expect(token).to.be.null }) it('should read a filter with k/v argument', function () { - const tokenizer = new Tokenizer(' | plus: a:1') + const tokenizer = new Tokenizer(' | plus: a:1', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -288,7 +292,7 @@ describe('Tokenizer', function () { expect(v.getText()).to.equal('1') }) it('should read a filter with "arr[0]" argument', function () { - const tokenizer = new Tokenizer('| plus: arr[0]') + const tokenizer = new Tokenizer('| plus: arr[0]', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -301,7 +305,7 @@ describe('Tokenizer', function () { expect(pa.props[0].getText()).to.equal('0') }) it('should read a filter with obj.foo argument', function () { - const tokenizer = new Tokenizer('| plus: obj.foo') + const tokenizer = new Tokenizer('| plus: obj.foo', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -314,7 +318,7 @@ describe('Tokenizer', function () { expect(pa.props[0].getText()).to.equal('foo') }) it('should read a filter with obj["foo"] argument', function () { - const tokenizer = new Tokenizer('| plus: obj["good luck"]') + const tokenizer = new Tokenizer('| plus: obj["good luck"]', trie) const token = tokenizer.readFilter() expect(token).to.have.property('name', 'plus') expect(token!.args).to.have.lengthOf(1) @@ -328,7 +332,7 @@ describe('Tokenizer', function () { }) describe('#readFilters()', () => { it('should read simple filters', function () { - const tokenizer = new Tokenizer('| plus: 3 | capitalize') + 
const tokenizer = new Tokenizer('| plus: 3 | capitalize', trie) const tokens = tokenizer.readFilters() expect(tokens).to.have.lengthOf(2) @@ -341,7 +345,7 @@ describe('Tokenizer', function () { expect(tokens[1].args).to.have.lengthOf(0) }) it('should read filters', function () { - const tokenizer = new Tokenizer('| plus: a:3 | capitalize | append: foo[a.b["c d"]]') + const tokenizer = new Tokenizer('| plus: a:3 | capitalize | append: foo[a.b["c d"]]', trie) const tokens = tokenizer.readFilters() expect(tokens).to.have.lengthOf(3) @@ -364,14 +368,14 @@ describe('Tokenizer', function () { }) describe('#readExpression()', () => { it('should read expression `a `', () => { - const exp = [...new Tokenizer('a ').readExpression()] + const exp = [...new Tokenizer('a ', trie).readExpression()] expect(exp).to.have.lengthOf(1) expect(exp[0]).to.be.instanceOf(PropertyAccessToken) expect(exp[0].getText()).to.deep.equal('a') }) it('should read expression `a[][b]`', () => { - const exp = [...new Tokenizer('a[][b]').readExpression()] + const exp = [...new Tokenizer('a[][b]', trie).readExpression()] expect(exp).to.have.lengthOf(1) const pa = exp[0] as PropertyAccessToken @@ -386,7 +390,7 @@ describe('Tokenizer', function () { expect(p2.getText()).to.equal('b') }) it('should read expression `a.`', () => { - const exp = [...new Tokenizer('a.').readExpression()] + const exp = [...new Tokenizer('a.', trie).readExpression()] expect(exp).to.have.lengthOf(1) const pa = exp[0] as PropertyAccessToken @@ -395,14 +399,14 @@ describe('Tokenizer', function () { expect(pa.props).to.have.lengthOf(0) }) it('should read expression `a ==`', () => { - const exp = [...new Tokenizer('a ==').readExpression()] + const exp = [...new Tokenizer('a ==', trie).readExpression()] expect(exp).to.have.lengthOf(1) expect(exp[0]).to.be.instanceOf(PropertyAccessToken) expect(exp[0].getText()).to.deep.equal('a') }) it('should read expression `a==b`', () => { - const exp = new Tokenizer('a==b').readExpression() + const exp = new Tokenizer('a==b', trie).readExpression() const [a, equals, b] = exp expect(a).to.be.instanceOf(PropertyAccessToken) @@ -415,11 +419,11 @@ describe('Tokenizer', function () { expect(b.getText()).to.deep.equal('b') }) it('should read expression `^`', () => { - const exp = new Tokenizer('^').readExpression() + const exp = new Tokenizer('^', trie).readExpression() expect([...exp]).to.deep.equal([]) }) it('should read expression `a == b`', () => { - const exp = new Tokenizer('a == b').readExpression() + const exp = new Tokenizer('a == b', trie).readExpression() const [a, equals, b] = exp expect(a).to.be.instanceOf(PropertyAccessToken) @@ -432,7 +436,7 @@ describe('Tokenizer', function () { expect(b.getText()).to.deep.equal('b') }) it('should read expression `(1..3) contains 3`', () => { - const exp = new Tokenizer('(1..3) contains 3').readExpression() + const exp = new Tokenizer('(1..3) contains 3', trie).readExpression() const [range, contains, rhs] = exp expect(range).to.be.instanceOf(RangeToken) @@ -445,7 +449,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('3') }) it('should read expression `a[b] == c`', () => { - const exp = new Tokenizer('a[b] == c').readExpression() + const exp = new Tokenizer('a[b] == c', trie).readExpression() const [lhs, contains, rhs] = exp expect(lhs).to.be.instanceOf(PropertyAccessToken) @@ -458,7 +462,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('c') }) it('should read expression `c[a["b"]] >= c`', () => { - const exp = new 
Tokenizer('c[a["b"]] >= c').readExpression() + const exp = new Tokenizer('c[a["b"]] >= c', trie).readExpression() const [lhs, op, rhs] = exp expect(lhs).to.be.instanceOf(PropertyAccessToken) @@ -471,7 +475,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('c') }) it('should read expression `"][" == var`', () => { - const exp = new Tokenizer('"][" == var').readExpression() + const exp = new Tokenizer('"][" == var', trie).readExpression() const [lhs, equals, rhs] = exp expect(lhs).to.be.instanceOf(QuotedToken) @@ -484,7 +488,7 @@ describe('Tokenizer', function () { expect(rhs.getText()).to.deep.equal('var') }) it('should read expression `"\\\'" == "\\""`', () => { - const exp = new Tokenizer('"\\\'" == "\\""').readExpression() + const exp = new Tokenizer('"\\\'" == "\\""', trie).readExpression() const [lhs, equals, rhs] = exp expect(lhs).to.be.instanceOf(QuotedToken) diff --git a/test/unit/render/expression.ts b/test/unit/render/expression.ts index e134cefcfc..7c48c7a5c3 100644 --- a/test/unit/render/expression.ts +++ b/test/unit/render/expression.ts @@ -3,12 +3,14 @@ import { expect } from 'chai' import { Context } from '../../../src/context/context' import { toThenable } from '../../../src/util/async' import { defaultOperators } from '../../../src/render/operator' +import { createTrie } from '../../../src/util/operator-trie' describe('Expression', function () { const ctx = new Context({}) + const trie = createTrie(defaultOperators) it('should throw when context not defined', done => { - toThenable(new Expression('foo', defaultOperators).value(undefined!)) + toThenable(new Expression('foo', defaultOperators, trie).value(undefined!)) .then(() => done(new Error('should not resolved'))) .catch(err => { expect(err.message).to.match(/context not defined/) @@ -18,19 +20,19 @@ describe('Expression', function () { describe('single value', function () { it('should eval literal', async function () { - expect(await toThenable(new Expression('2.4', defaultOperators).value(ctx))).to.equal(2.4) - expect(await toThenable(new Expression('"foo"', defaultOperators).value(ctx))).to.equal('foo') - expect(await toThenable(new Expression('false', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('2.4', defaultOperators, trie).value(ctx))).to.equal(2.4) + expect(await toThenable(new Expression('"foo"', defaultOperators, trie).value(ctx))).to.equal('foo') + expect(await toThenable(new Expression('false', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should eval range expression', async function () { const ctx = new Context({ two: 2 }) - expect(await toThenable(new Expression('(2..4)', defaultOperators).value(ctx))).to.deep.equal([2, 3, 4]) - expect(await toThenable(new Expression('(two..4)', defaultOperators).value(ctx))).to.deep.equal([2, 3, 4]) + expect(await toThenable(new Expression('(2..4)', defaultOperators, trie).value(ctx))).to.deep.equal([2, 3, 4]) + expect(await toThenable(new Expression('(two..4)', defaultOperators, trie).value(ctx))).to.deep.equal([2, 3, 4]) }) it('should eval literal', async function () { - expect(await toThenable(new Expression('2.4', defaultOperators).value(ctx))).to.equal(2.4) - expect(await toThenable(new Expression('"foo"', defaultOperators).value(ctx))).to.equal('foo') - expect(await toThenable(new Expression('false', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('2.4', defaultOperators, trie).value(ctx))).to.equal(2.4) + expect(await toThenable(new 
Expression('"foo"', defaultOperators, trie).value(ctx))).to.equal('foo') + expect(await toThenable(new Expression('false', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should eval property access', async function () { @@ -39,112 +41,112 @@ describe('Expression', function () { coo: 'bar', doo: { foo: 'bar', bar: { foo: 'bar' } } }) - expect(await toThenable(new Expression('foo.bar', defaultOperators).value(ctx))).to.equal('BAR') - expect(await toThenable(new Expression('foo["bar"]', defaultOperators).value(ctx))).to.equal('BAR') - expect(await toThenable(new Expression('foo[coo]', defaultOperators).value(ctx))).to.equal('BAR') - expect(await toThenable(new Expression('foo[doo.foo]', defaultOperators).value(ctx))).to.equal('BAR') - expect(await toThenable(new Expression('foo[doo["foo"]]', defaultOperators).value(ctx))).to.equal('BAR') - expect(await toThenable(new Expression('doo[coo].foo', defaultOperators).value(ctx))).to.equal('bar') + expect(await toThenable(new Expression('foo.bar', defaultOperators, trie).value(ctx))).to.equal('BAR') + expect(await toThenable(new Expression('foo["bar"]', defaultOperators, trie).value(ctx))).to.equal('BAR') + expect(await toThenable(new Expression('foo[coo]', defaultOperators, trie).value(ctx))).to.equal('BAR') + expect(await toThenable(new Expression('foo[doo.foo]', defaultOperators, trie).value(ctx))).to.equal('BAR') + expect(await toThenable(new Expression('foo[doo["foo"]]', defaultOperators, trie).value(ctx))).to.equal('BAR') + expect(await toThenable(new Expression('doo[coo].foo', defaultOperators, trie).value(ctx))).to.equal('bar') }) }) describe('simple expression', function () { it('should return false for "1==2"', async () => { - expect(await toThenable(new Expression('1==2', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('1==2', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should return true for "1<2"', async () => { - expect(await toThenable(new Expression('1<2', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('1<2', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return true for "1 < 2"', async () => { - expect(await toThenable(new Expression('1 < 2', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('1 < 2', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return true for "1 < 2"', async () => { - expect(await toThenable(new Expression('1 < 2', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('1 < 2', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return true for "2 <= 2"', async () => { - expect(await toThenable(new Expression('2 <= 2', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('2 <= 2', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return true for "one <= two"', async () => { const ctx = new Context({ one: 1, two: 2 }) - expect(await toThenable(new Expression('one <= two', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('one <= two', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return false for "x contains "x""', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('x contains "x"', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('x contains "x"', defaultOperators, trie).value(ctx))).to.equal(false) }) 
it('should return true for "x contains "X""', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('x contains "X"', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('x contains "X"', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return false for "1 contains "x""', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('1 contains "x"', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('1 contains "x"', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should return false for "y contains "x""', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('y contains "x"', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('y contains "x"', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should return false for "z contains "x""', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('z contains "x"', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('z contains "x"', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should return true for "(1..5) contains 3"', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('(1..5) contains 3', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('(1..5) contains 3', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should return false for "(1..5) contains 6"', async () => { const ctx = new Context({ x: 'XXX' }) - expect(await toThenable(new Expression('(1..5) contains 6', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('(1..5) contains 6', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should return true for ""<=" == "<=""', async () => { - expect(await toThenable(new Expression('"<=" == "<="', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('"<=" == "<="', defaultOperators, trie).value(ctx))).to.equal(true) }) }) it('should allow space in quoted value', async function () { const ctx = new Context({ space: ' ' }) - expect(await toThenable(new Expression('" " == space', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('" " == space', defaultOperators, trie).value(ctx))).to.equal(true) }) describe('escape', () => { it('should escape quote', async function () { const ctx = new Context({ quote: '"' }) - expect(await toThenable(new Expression('"\\"" == quote', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('"\\"" == quote', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should escape square bracket', async function () { const ctx = new Context({ obj: { ']': 'bracket' } }) - expect(await toThenable(new Expression('obj["]"] == "bracket"', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('obj["]"] == "bracket"', defaultOperators, trie).value(ctx))).to.equal(true) }) }) describe('complex expression', function () { it('should support value or value', async function () { - expect(await toThenable(new Expression('false or true', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('false or true', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should support < and contains', 
async function () { - expect(await toThenable(new Expression('1 < 2 and x contains "x"', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('1 < 2 and x contains "x"', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should support < or contains', async function () { - expect(await toThenable(new Expression('1 < 2 or x contains "x"', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('1 < 2 or x contains "x"', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should support value and !=', async function () { const ctx = new Context({ empty: '' }) - expect(await toThenable(new Expression('empty and empty != ""', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('empty and empty != ""', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should recognize quoted value', async function () { - expect(await toThenable(new Expression('">"', defaultOperators).value(ctx))).to.equal('>') + expect(await toThenable(new Expression('">"', defaultOperators, trie).value(ctx))).to.equal('>') }) it('should evaluate from right to left', async function () { - expect(await toThenable(new Expression('true or false and false', defaultOperators).value(ctx))).to.equal(true) - expect(await toThenable(new Expression('true and false and false or true', defaultOperators).value(ctx))).to.equal(false) + expect(await toThenable(new Expression('true or false and false', defaultOperators, trie).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('true and false and false or true', defaultOperators, trie).value(ctx))).to.equal(false) }) it('should recognize property access', async function () { const ctx = new Context({ obj: { foo: true } }) - expect(await toThenable(new Expression('obj["foo"] and true', defaultOperators).value(ctx))).to.equal(true) + expect(await toThenable(new Expression('obj["foo"] and true', defaultOperators, trie).value(ctx))).to.equal(true) }) it('should allow nested property access', async function () { const ctx = new Context({ obj: { foo: 'FOO' }, keys: { "what's this": 'foo' } }) - expect(await toThenable(new Expression('obj[keys["what\'s this"]]', defaultOperators).value(ctx))).to.equal('FOO') + expect(await toThenable(new Expression('obj[keys["what\'s this"]]', defaultOperators, trie).value(ctx))).to.equal('FOO') }) }) }) diff --git a/test/unit/template/output.ts b/test/unit/template/output.ts index 52aebaaecd..d5849fe7f6 100644 --- a/test/unit/template/output.ts +++ b/test/unit/template/output.ts @@ -5,12 +5,16 @@ import { Output } from '../../../src/template/output' import { OutputToken } from '../../../src/tokens/output-token' import { FilterMap } from '../../../src/template/filter/filter-map' import { defaultOptions } from '../../../src/liquid-options' +import { createTrie } from '../../../src/util/operator-trie' +import { defaultOperators } from '../../../src/types' const expect = chai.expect describe('Output', function () { const emitter: any = { write: (html: string) => (emitter.html += html), html: '' } - const liquid = {} as any + const liquid = { + options: { operatorsTrie: createTrie(defaultOperators) } + } as any let filters: FilterMap beforeEach(function () { filters = new FilterMap(false, liquid) diff --git a/test/unit/template/value.ts b/test/unit/template/value.ts index fa20ad2cf8..8c88e02bd6 100644 --- a/test/unit/template/value.ts +++ b/test/unit/template/value.ts @@ -6,13 +6,17 @@ import * as sinonChai from 'sinon-chai' 
import * as sinon from 'sinon' import { Context } from '../../../src/context/context' import { Value } from '../../../src/template/value' +import { createTrie } from '../../../src/util/operator-trie' +import { defaultOperators } from '../../../src/types' chai.use(sinonChai) const expect = chai.expect describe('Value', function () { - const liquid = {} as any + const liquid = { + options: { operatorsTrie: createTrie(defaultOperators) } + } as any describe('#constructor()', function () { const filterMap = new FilterMap(false, liquid)
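
Usage sketch (not part of the patch): the snippet below illustrates how a user-supplied operator ends up in the programmatically built trie and how matchOperator consumes it. It is a minimal illustration only; the import paths are the repo-internal ones used in the diff above, and the isFooBar handler is a hypothetical stand-in for the one configured in test/integration/liquid/operators-option.ts.

import { createTrie } from './src/util/operator-trie'
import { matchOperator } from './src/parser/match-operator'
import { defaultOperators, Operators } from './src/render/operator'

// Extend the built-in operators with a custom word operator (hypothetical handler).
const operators: Operators = {
  ...defaultOperators,
  isFooBar: (l: any, r: any) => l === 'foo' && r === 'bar'
}

// normalize() does the equivalent of this whenever an `operators` option is passed.
const trie = createTrie(operators)

// matchOperator walks the trie from `begin` and returns the end index of the
// longest operator found, or -1 when nothing matches.
matchOperator('isFooBar "bar"', 0, trie) // => 8
matchOperator('isFooBarbaz', 0, trie)    // => -1: word operators need a boundary
matchOperator('>= 1', 0, trie)           // => 2

Building the trie once in normalize() (and once at module load for defaultOptions) means readOperator() no longer relies on a hand-maintained lookup table, so custom operators passed via LiquidOptions#operators are recognized during tokenization.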