From 87b5d2a05d1a661ff1ac4f5f561935eff04add2e Mon Sep 17 00:00:00 2001 From: Nathaniel Cook Date: Tue, 22 Dec 2015 13:56:56 -0700 Subject: [PATCH] enable using TICKscript vars inside of lambda expressions fixes and tests regex support --- CHANGELOG.md | 10 ++ integrations/streamer_test.go | 76 +++++++++- tick/TICKscript.md | 5 +- tick/eval.go | 84 +++++++++++ tick/lex.go | 34 ++++- tick/lex_test.go | 41 ++++++ tick/parser.go | 54 +++++-- tick/parser_test.go | 268 +++++++++++++++++++++++++++++++++- 8 files changed, 543 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b3ff46db4..f0f8db14b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## v0.2.4 [unreleased] + +### Release Notes + +### Features +- [#107](https://github.com/influxdb/kapacitor/issues/107): Enable TICKscript variables to be defined and then referenced from lambda expressions. + Also fixes various bugs around using regexes. + +### Bugfixes + ## v0.2.3 [2015-12-22] ### Release Notes diff --git a/integrations/streamer_test.go b/integrations/streamer_test.go index 2dbbc693c..73fb97dfc 100644 --- a/integrations/streamer_test.go +++ b/integrations/streamer_test.go @@ -167,6 +167,8 @@ stream func TestStream_Window(t *testing.T) { var script = ` +var period = 10s +var every = 10s stream .from() .database('dbname') @@ -174,8 +176,8 @@ stream .measurement('cpu') .where(lambda: "host" == 'serverA') .window() - .period(10s) - .every(10s) + .period(period) + .every(every) .httpOut('TestStream_Window') ` @@ -245,6 +247,66 @@ stream testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, er) } +func TestStream_VarWhereString(t *testing.T) { + + var script = ` +var serverStr = 'serverA' +stream + .from().measurement('cpu') + .where(lambda: "host" == serverStr ) + .window() + .period(10s) + .every(10s) + .mapReduce(influxql.count('value')) + .httpOut('TestStream_SimpleMR') +` + er := kapacitor.Result{ + Series: imodels.Rows{ + { + Name: "cpu", + Tags: nil, + Columns: []string{"time", "count"}, + Values: [][]interface{}{[]interface{}{ + time.Date(1971, 1, 1, 0, 0, 10, 0, time.UTC), + 10.0, + }}, + }, + }, + } + + testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, er) +} + +func TestStream_VarWhereRegex(t *testing.T) { + + var script = ` +var serverPattern = /^serverA$/ +stream + .from().measurement('cpu') + .where(lambda: "host" =~ serverPattern ) + .window() + .period(10s) + .every(10s) + .mapReduce(influxql.count('value')) + .httpOut('TestStream_SimpleMR') +` + er := kapacitor.Result{ + Series: imodels.Rows{ + { + Name: "cpu", + Tags: nil, + Columns: []string{"time", "count"}, + Values: [][]interface{}{[]interface{}{ + time.Date(1971, 1, 1, 0, 0, 10, 0, time.UTC), + 10.0, + }}, + }, + }, + } + + testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, er) +} + func TestStream_GroupBy(t *testing.T) { var script = ` @@ -1070,6 +1132,10 @@ func TestStream_Alert(t *testing.T) { defer ts.Close() var script = ` +var infoThreshold = 6.0 +var warnThreshold = 7.0 +var critThreshold = 8.0 + stream .from().measurement('cpu') .where(lambda: "host" == 'serverA') @@ -1080,9 +1146,9 @@ stream .mapReduce(influxql.count('idle')) .alert() .id('kapacitor/{{ .Name }}/{{ index .Tags "host" }}') - .info(lambda: "count" > 6.0) - .warn(lambda: "count" > 7.0) - .crit(lambda: "count" > 8.0) + .info(lambda: "count" > infoThreshold) + .warn(lambda: "count" > warnThreshold) + .crit(lambda: "count" > critThreshold) .post('` + ts.URL + `') ` diff --git 
a/tick/TICKscript.md b/tick/TICKscript.md index 39a04335e..c9c28be76 100644 --- a/tick/TICKscript.md +++ b/tick/TICKscript.md @@ -52,6 +52,7 @@ duration_lit = int_lit duration_unit . duration_unit = "u" | "ยต" | "ms" | "s" | "m" | "h" | "d" | "w" . string_lit = `'` { unicode_char } `'` . star_lit = "*" +regex_lit = `/` { unicode_char } `/` . operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" | "<" | "<=" | ">" | ">=" | "=~" | "!~" | @@ -60,14 +61,14 @@ operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" | Program = Statement { Statement } . Statement = Declaration | Expression . Declaration = "var" identifier "=" Expression . -Expression = identifier { Chain } | Function { Chain } . +Expression = identifier { Chain } | Function { Chain } | Primary . Chain = "." Function { Chain} | "." identifier { Chain } . Function = identifier "(" Parameters ")" . Parameters = { Parameter "," } [ Parameter ] . Parameter = Expression | "lambda:" LambdaExpr | Primary . Primary = "(" LambdaExpr ")" | number_lit | string_lit | boolean_lit | duration_lit | regex_lit | star_lit | - LFunc | Reference | "-" Primary | "!" Primary . + LFunc | identifier | Reference | "-" Primary | "!" Primary . Reference = `"` { unicode_char } `"` . LambdaExpr = Primary operator_lit Primary . LFunc = identifier "(" LParameters ")" diff --git a/tick/eval.go b/tick/eval.go index bb8ec1ad5..9a8c485d8 100644 --- a/tick/eval.go +++ b/tick/eval.go @@ -4,8 +4,10 @@ package tick import ( "fmt" "reflect" + "regexp" "runtime" "strings" + "time" "unicode" "unicode/utf8" ) @@ -60,6 +62,20 @@ func eval(n Node, scope *Scope, stck *stack) (err error) { } evalUnary(node.Operator, scope, stck) case *LambdaNode: + // Catch panic from resolveIdents and return as error. + err = func() (e error) { + defer func(ep *error) { + err := recover() + if err != nil { + *ep = err.(error) + } + }(&e) + node.Node = resolveIdents(node.Node, scope) + return e + }() + if err != nil { + return + } stck.Push(node.Node) case *BinaryNode: err = eval(node.Left, scope, stck) @@ -260,3 +276,71 @@ func capilatizeFirst(s string) string { s = string(unicode.ToUpper(r)) + s[n:] return s } + +// Resolve all identifiers immediately in the tree with their value from the scope. +// This operation is performed in place. +// Panics if the scope value does not exist or if the value cannot be expressed as a literal. +func resolveIdents(n Node, scope *Scope) Node { + switch node := n.(type) { + case *IdentifierNode: + v, err := scope.Get(node.Ident) + if err != nil { + panic(err) + } + return valueToLiteralNode(node.pos, v) + case *UnaryNode: + node.Node = resolveIdents(node.Node, scope) + case *BinaryNode: + node.Left = resolveIdents(node.Left, scope) + node.Right = resolveIdents(node.Right, scope) + case *FunctionNode: + for i, arg := range node.Args { + node.Args[i] = resolveIdents(arg, scope) + } + case *ListNode: + for i, n := range node.Nodes { + node.Nodes[i] = resolveIdents(n, scope) + } + } + return n +} + +// Convert raw value to literal node, for all supported basic types. 
+func valueToLiteralNode(pos pos, v interface{}) Node { + switch value := v.(type) { + case bool: + return &BoolNode{ + pos: pos, + Bool: value, + } + case int64: + return &NumberNode{ + pos: pos, + IsInt: true, + Int64: value, + } + case float64: + return &NumberNode{ + pos: pos, + IsFloat: true, + Float64: value, + } + case time.Duration: + return &DurationNode{ + pos: pos, + Dur: value, + } + case string: + return &StringNode{ + pos: pos, + Literal: value, + } + case *regexp.Regexp: + return &RegexNode{ + pos: pos, + Regex: value, + } + default: + panic(fmt.Errorf("unsupported literal type %T", v)) + } +} diff --git a/tick/lex.go b/tick/lex.go index d20f71cad..1d3ac3d10 100644 --- a/tick/lex.go +++ b/tick/lex.go @@ -70,6 +70,7 @@ const ( ) var operatorStr = [...]string{ + tokenNot: "!", tokenPlus: "+", tokenMinus: "-", tokenMult: "*", @@ -124,6 +125,8 @@ func (t tokenType) String() string { return "number" case t == tokenString: return "string" + case t == tokenRegex: + return "regex" case t == tokenDot: return "." case t == tokenAsgn: @@ -165,6 +168,10 @@ type token struct { val string } +func (t token) String() string { + return fmt.Sprintf("{%v pos: %d val: %s}", t.typ, t.pos, t.val) +} + // lexer holds the state of the scanner. type lexer struct { input string // the string being scanned. @@ -254,6 +261,14 @@ func (l *lexer) ignore() { l.start = l.pos } +// ignore a contiguous block of spaces. +func (l *lexer) ignoreSpace() { + for isSpace(l.next()) { + l.ignore() + } + l.backup() +} + // expect the next rune to be r func (l *lexer) expect(r rune) bool { if l.peek() == r { @@ -279,9 +294,6 @@ func lexToken(l *lexer) stateFn { case r == '\'': l.backup() return lexSingleOrTripleString - case r == '/': - l.backup() - return lexRegex case isSpace(r): l.ignore() case r == '(': @@ -333,6 +345,12 @@ func lexOperator(l *lexer) stateFn { } op := strToOperator[l.current()] l.emit(op) + if op == tokenRegexNotEqual { + l.ignoreSpace() + if l.peek() == '/' { + return lexRegex + } + } return lexToken case '>', '<': if l.peek() == '=' { @@ -346,8 +364,18 @@ func lexOperator(l *lexer) stateFn { l.next() op := strToOperator[l.current()] l.emit(op) + if op == tokenRegexEqual { + l.ignoreSpace() + if l.peek() == '/' { + return lexRegex + } + } } else { l.emit(tokenAsgn) + l.ignoreSpace() + if l.peek() == '/' { + return lexRegex + } } return lexToken } diff --git a/tick/lex_test.go b/tick/lex_test.go index a70d7b370..e176bba65 100644 --- a/tick/lex_test.go +++ b/tick/lex_test.go @@ -33,6 +33,13 @@ func TestLexer(t *testing.T) { cases := []testCase{ //Symbols + Operators + { + in: "!", + tokens: []token{ + token{tokenNot, 0, "!"}, + token{tokenEOF, 1, ""}, + }, + }, { in: "+", tokens: []token{ @@ -370,6 +377,40 @@ func TestLexer(t *testing.T) { token{tokenEOF, 9, ""}, }, }, + // Regex -- can only be lexed within context + { + in: `=~ //`, + tokens: []token{ + token{tokenRegexEqual, 0, "=~"}, + token{tokenRegex, 3, "//"}, + token{tokenEOF, 5, ""}, + }, + }, + { + in: `!~ //`, + tokens: []token{ + token{tokenRegexNotEqual, 0, "!~"}, + token{tokenRegex, 3, "//"}, + token{tokenEOF, 5, ""}, + }, + }, + { + in: `= //`, + tokens: []token{ + token{tokenAsgn, 0, "="}, + token{tokenRegex, 2, "//"}, + token{tokenEOF, 4, ""}, + }, + }, + { + in: `= /^((.*)[a-z]+\S{0,2})|cat\/\/$/`, + tokens: []token{ + token{tokenAsgn, 0, "="}, + token{tokenRegex, 2, `/^((.*)[a-z]+\S{0,2})|cat\/\/$/`}, + token{tokenEOF, 33, ""}, + }, + }, + //Space { in: " ", diff --git a/tick/parser.go b/tick/parser.go index 23b1e02d2..85c13bbce 
100644 --- a/tick/parser.go +++ b/tick/parser.go @@ -87,10 +87,18 @@ func (p *parser) unexpected(tok token, expected ...tokenType) { if start < 0 { start = 0 } + // Skip any new lines just show a single line + if i := strings.LastIndexByte(p.Text[start:tok.pos], '\n'); i != -1 { + start = start + i + 1 + } stop := tok.pos + bufSize if stop > len(p.Text) { stop = len(p.Text) } + // Skip any new lines just show a single line + if i := strings.IndexByte(p.Text[tok.pos:stop], '\n'); i != -1 { + stop = tok.pos + i + } line, char := p.lex.lineNumber(tok.pos) expectedStrs := make([]string, len(expected)) for i := range expected { @@ -187,8 +195,13 @@ func (p *parser) vr() Node { //parse an expression func (p *parser) expression() Node { - term := p.funcOrIdent() - return p.chain(term) + switch p.peek().typ { + case tokenIdent: + term := p.funcOrIdent() + return p.chain(term) + default: + return p.primary() + } } //parse a function or identifier invocation chain @@ -267,19 +280,21 @@ func (p *parser) lambdaExpr() Node { // Operator Precedence parsing var precedence = [...]int{ - tokenOr: 0, - tokenAnd: 1, - tokenEqual: 2, - tokenNotEqual: 2, - tokenGreater: 3, - tokenGreaterEqual: 3, - tokenLess: 3, - tokenLessEqual: 3, - tokenPlus: 4, - tokenMinus: 4, - tokenMult: 5, - tokenDiv: 5, - tokenMod: 5, + tokenOr: 0, + tokenAnd: 1, + tokenEqual: 2, + tokenNotEqual: 2, + tokenRegexEqual: 2, + tokenRegexNotEqual: 2, + tokenGreater: 3, + tokenGreaterEqual: 3, + tokenLess: 3, + tokenLessEqual: 3, + tokenPlus: 4, + tokenMinus: 4, + tokenMult: 5, + tokenDiv: 5, + tokenMod: 5, } // parse the expression considering operator precedence. @@ -356,8 +371,15 @@ func (p *parser) primary() Node { case tok.typ == tokenReference: return p.reference() case tok.typ == tokenIdent: - return p.lfunction() + p.next() + if p.peek().typ == tokenLParen { + p.backup() + return p.lfunction() + } + p.backup() + return p.identifier() case tok.typ == tokenMinus, tok.typ == tokenNot: + p.next() return newUnary(tok, p.primary()) default: p.unexpected( diff --git a/tick/parser_test.go b/tick/parser_test.go index 66f488129..1a84b9efa 100644 --- a/tick/parser_test.go +++ b/tick/parser_test.go @@ -2,6 +2,7 @@ package tick import ( "reflect" + "regexp" "testing" "time" @@ -46,11 +47,11 @@ func TestParseErrors(t *testing.T) { cases := []testCase{ testCase{ Text: "a\n\n\nvar b = ", - Error: "parser: unexpected EOF line 4 char 9 in \"\n\nvar b = \". expected: \"identifier\"", + Error: `parser: unexpected EOF line 4 char 9 in "var b = ". expected: "number","string","duration","identifier","TRUE","FALSE","==","(","-","!"`, }, testCase{ - Text: "a\n\n\nvar b = stream.window()var period", - Error: `parser: unexpected EOF line 4 char 34 in "var period". expected: "="`, + Text: "a\n\n\nvar b = stream.window()var period)\n\nvar x = 1", + Error: `parser: unexpected ) line 4 char 34 in "var period)". 
expected: "="`, }, testCase{ Text: "a\n\n\nvar b = stream.window(\nb.period(10s)", @@ -126,6 +127,216 @@ func TestParseStatements(t *testing.T) { Root Node err error }{ + { + script: `var x = 'str'`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &StringNode{ + pos: 8, + Literal: "str", + }, + }, + }, + }, + }, + { + script: `var x = TRUE`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &BoolNode{ + pos: 8, + Bool: true, + }, + }, + }, + }, + }, + { + script: `var x = !FALSE`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &UnaryNode{ + pos: 8, + Operator: tokenNot, + Node: &BoolNode{ + pos: 9, + Bool: false, + }, + }, + }, + }, + }, + }, + { + script: `var x = 1`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &NumberNode{ + pos: 8, + IsInt: true, + Int64: 1, + }, + }, + }, + }, + }, + { + script: `var x = -1`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &UnaryNode{ + pos: 8, + Operator: tokenMinus, + Node: &NumberNode{ + pos: 9, + IsInt: true, + Int64: 1, + }, + }, + }, + }, + }, + }, + { + script: `var x = 1.0`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &NumberNode{ + pos: 8, + IsFloat: true, + Float64: 1.0, + }, + }, + }, + }, + }, + { + script: `var x = -1.0`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &UnaryNode{ + pos: 8, + Operator: tokenMinus, + Node: &NumberNode{ + pos: 9, + IsFloat: true, + Float64: 1.0, + }, + }, + }, + }, + }, + }, + { + script: `var x = 5h`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &DurationNode{ + pos: 8, + Dur: time.Hour * 5, + }, + }, + }, + }, + }, + { + script: `var x = -5h`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &UnaryNode{ + pos: 8, + Operator: tokenMinus, + Node: &DurationNode{ + pos: 9, + Dur: time.Hour * 5, + }, + }, + }, + }, + }, + }, + { + script: `var x = /.*\//`, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "x", + }, + Right: &RegexNode{ + pos: 8, + Regex: regexp.MustCompile(".*/"), + }, + }, + }, + }, + }, { script: `var x = a.f()`, Root: &ListNode{ @@ -153,6 +364,57 @@ func TestParseStatements(t *testing.T) { }, }, }, + { + script: `var t = 42 + stream.where(lambda: "value" > t) + `, + Root: &ListNode{ + Nodes: []Node{ + &BinaryNode{ + pos: 6, + Operator: tokenAsgn, + Left: &IdentifierNode{ + pos: 4, + Ident: "t", + }, + Right: &NumberNode{ + pos: 8, + IsInt: true, + Int64: 42, + }, + }, + &BinaryNode{ + pos: 20, + Operator: tokenDot, + Left: &IdentifierNode{ + pos: 14, + Ident: "stream", + }, + Right: &FunctionNode{ + pos: 21, + Func: "where", + Args: []Node{ + &LambdaNode{ + pos: 27, + Node: &BinaryNode{ + pos: 43, + Operator: tokenGreater, + Left: 
&ReferenceNode{ + pos: 35, + Reference: "value", + }, + Right: &IdentifierNode{ + pos: 45, + Ident: "t", + }, + }, + }, + }, + }, + }, + }, + }, + }, { script: ` var x = stream