Skip to content

Commit

Permalink
enable using TICKscript vars inside of lambda expressions
Browse files Browse the repository at this point in the history
fixes and tests regex support
  • Loading branch information
nathanielc committed Dec 22, 2015
1 parent 7ea4a54 commit 87b5d2a
Show file tree
Hide file tree
Showing 8 changed files with 543 additions and 29 deletions.
10 changes: 10 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
# Changelog

## v0.2.4 [unreleased]

### Release Notes

### Features
- [#107](https://github.com/influxdb/kapacitor/issues/107): Enable TICKscript variables to be defined and then referenced from lambda expressions.
Also fixes various bugs around using regexes.

### Bugfixes

## v0.2.3 [2015-12-22]

### Release Notes
Expand Down
76 changes: 71 additions & 5 deletions integrations/streamer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -167,15 +167,17 @@ stream
func TestStream_Window(t *testing.T) {

var script = `
var period = 10s
var every = 10s
stream
.from()
.database('dbname')
.retentionPolicy('rpname')
.measurement('cpu')
.where(lambda: "host" == 'serverA')
.window()
.period(10s)
.every(10s)
.period(period)
.every(every)
.httpOut('TestStream_Window')
`

Expand Down Expand Up @@ -245,6 +247,66 @@ stream
testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, er)
}

// TestStream_VarWhereString verifies that a TICKscript string variable
// declared with `var` can be referenced from within a lambda expression
// used in a .where() clause.
func TestStream_VarWhereString(t *testing.T) {

	script := `
var serverStr = 'serverA'
stream
	.from().measurement('cpu')
	.where(lambda: "host" == serverStr )
	.window()
		.period(10s)
		.every(10s)
	.mapReduce(influxql.count('value'))
	.httpOut('TestStream_SimpleMR')
`
	// Expected: 10 points from host serverA in the 10s window.
	// Note: the output/fixture name intentionally reuses "TestStream_SimpleMR"
	// so this test shares that test's input data.
	expected := kapacitor.Result{
		Series: imodels.Rows{
			{
				Name:    "cpu",
				Tags:    nil,
				Columns: []string{"time", "count"},
				Values: [][]interface{}{{
					time.Date(1971, 1, 1, 0, 0, 10, 0, time.UTC),
					10.0,
				}},
			},
		},
	}

	testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, expected)
}

// TestStream_VarWhereRegex verifies that a TICKscript regex variable
// declared with `var` can be referenced from within a lambda expression
// using the =~ operator in a .where() clause.
func TestStream_VarWhereRegex(t *testing.T) {

	script := `
var serverPattern = /^serverA$/
stream
	.from().measurement('cpu')
	.where(lambda: "host" =~ serverPattern )
	.window()
		.period(10s)
		.every(10s)
	.mapReduce(influxql.count('value'))
	.httpOut('TestStream_SimpleMR')
`
	// Expected: 10 points from host serverA in the 10s window.
	// Note: the output/fixture name intentionally reuses "TestStream_SimpleMR"
	// so this test shares that test's input data.
	expected := kapacitor.Result{
		Series: imodels.Rows{
			{
				Name:    "cpu",
				Tags:    nil,
				Columns: []string{"time", "count"},
				Values: [][]interface{}{{
					time.Date(1971, 1, 1, 0, 0, 10, 0, time.UTC),
					10.0,
				}},
			},
		},
	}

	testStreamerWithOutput(t, "TestStream_SimpleMR", script, 15*time.Second, expected)
}

func TestStream_GroupBy(t *testing.T) {

var script = `
Expand Down Expand Up @@ -1070,6 +1132,10 @@ func TestStream_Alert(t *testing.T) {
defer ts.Close()

var script = `
var infoThreshold = 6.0
var warnThreshold = 7.0
var critThreshold = 8.0
stream
.from().measurement('cpu')
.where(lambda: "host" == 'serverA')
Expand All @@ -1080,9 +1146,9 @@ stream
.mapReduce(influxql.count('idle'))
.alert()
.id('kapacitor/{{ .Name }}/{{ index .Tags "host" }}')
.info(lambda: "count" > 6.0)
.warn(lambda: "count" > 7.0)
.crit(lambda: "count" > 8.0)
.info(lambda: "count" > infoThreshold)
.warn(lambda: "count" > warnThreshold)
.crit(lambda: "count" > critThreshold)
.post('` + ts.URL + `')
`

Expand Down
5 changes: 3 additions & 2 deletions tick/TICKscript.md
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ duration_lit = int_lit duration_unit .
duration_unit = "u" | "µ" | "ms" | "s" | "m" | "h" | "d" | "w" .
string_lit = `'` { unicode_char } `'` .
star_lit = "*"
regex_lit = `/` { unicode_char } `/` .
operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" |
"<" | "<=" | ">" | ">=" | "=~" | "!~" |
Expand All @@ -60,14 +61,14 @@ operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" |
Program = Statement { Statement } .
Statement = Declaration | Expression .
Declaration = "var" identifier "=" Expression .
Expression = identifier { Chain } | Function { Chain } .
Expression = identifier { Chain } | Function { Chain } | Primary .
Chain = "." Function { Chain} | "." identifier { Chain } .
Function = identifier "(" Parameters ")" .
Parameters = { Parameter "," } [ Parameter ] .
Parameter = Expression | "lambda:" LambdaExpr | Primary .
Primary = "(" LambdaExpr ")" | number_lit | string_lit |
boolean_lit | duration_lit | regex_lit | star_lit |
LFunc | Reference | "-" Primary | "!" Primary .
LFunc | identifier | Reference | "-" Primary | "!" Primary .
Reference = `"` { unicode_char } `"` .
LambdaExpr = Primary operator_lit Primary .
LFunc = identifier "(" LParameters ")"
Expand Down
84 changes: 84 additions & 0 deletions tick/eval.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,10 @@ package tick
import (
"fmt"
"reflect"
"regexp"
"runtime"
"strings"
"time"
"unicode"
"unicode/utf8"
)
Expand Down Expand Up @@ -60,6 +62,20 @@ func eval(n Node, scope *Scope, stck *stack) (err error) {
}
evalUnary(node.Operator, scope, stck)
case *LambdaNode:
// Catch panic from resolveIdents and return as error.
err = func() (e error) {
defer func(ep *error) {
err := recover()
if err != nil {
*ep = err.(error)
}
}(&e)
node.Node = resolveIdents(node.Node, scope)
return e
}()
if err != nil {
return
}
stck.Push(node.Node)
case *BinaryNode:
err = eval(node.Left, scope, stck)
Expand Down Expand Up @@ -260,3 +276,71 @@ func capilatizeFirst(s string) string {
s = string(unicode.ToUpper(r)) + s[n:]
return s
}

// resolveIdents walks the AST rooted at n and substitutes every
// IdentifierNode with a literal node holding the identifier's current
// value from scope. The substitution is done in place (children are
// reassigned on their parent); the possibly-replaced root is returned.
// Panics if an identifier is not present in the scope or its value has
// no literal representation (see valueToLiteralNode).
func resolveIdents(n Node, scope *Scope) Node {
	switch node := n.(type) {
	case *IdentifierNode:
		// Leaf case: replace the identifier with its literal value.
		value, err := scope.Get(node.Ident)
		if err != nil {
			panic(err)
		}
		return valueToLiteralNode(node.pos, value)
	case *UnaryNode:
		node.Node = resolveIdents(node.Node, scope)
	case *BinaryNode:
		node.Left = resolveIdents(node.Left, scope)
		node.Right = resolveIdents(node.Right, scope)
	case *FunctionNode:
		// Resolve each argument, reassigning in place.
		for i := range node.Args {
			node.Args[i] = resolveIdents(node.Args[i], scope)
		}
	case *ListNode:
		for i := range node.Nodes {
			node.Nodes[i] = resolveIdents(node.Nodes[i], scope)
		}
	}
	return n
}

// valueToLiteralNode converts a raw scope value into the matching AST
// literal node, positioned at pos. Supported types: bool, int64,
// float64, time.Duration, string and *regexp.Regexp.
// Panics for any other type.
func valueToLiteralNode(pos pos, v interface{}) Node {
	switch val := v.(type) {
	case bool:
		return &BoolNode{pos: pos, Bool: val}
	case string:
		return &StringNode{pos: pos, Literal: val}
	case int64:
		return &NumberNode{pos: pos, IsInt: true, Int64: val}
	case float64:
		return &NumberNode{pos: pos, IsFloat: true, Float64: val}
	case time.Duration:
		return &DurationNode{pos: pos, Dur: val}
	case *regexp.Regexp:
		return &RegexNode{pos: pos, Regex: val}
	default:
		panic(fmt.Errorf("unsupported literal type %T", v))
	}
}
34 changes: 31 additions & 3 deletions tick/lex.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ const (
)

var operatorStr = [...]string{
tokenNot: "!",
tokenPlus: "+",
tokenMinus: "-",
tokenMult: "*",
Expand Down Expand Up @@ -124,6 +125,8 @@ func (t tokenType) String() string {
return "number"
case t == tokenString:
return "string"
case t == tokenRegex:
return "regex"
case t == tokenDot:
return "."
case t == tokenAsgn:
Expand Down Expand Up @@ -165,6 +168,10 @@ type token struct {
val string
}

// String returns a human-readable representation of the token
// (type, byte position, and raw value), used in lexer test output
// and debugging. Implements fmt.Stringer.
func (t token) String() string {
	return fmt.Sprintf("{%v pos: %d val: %s}", t.typ, t.pos, t.val)
}

// lexer holds the state of the scanner.
type lexer struct {
input string // the string being scanned.
Expand Down Expand Up @@ -254,6 +261,14 @@ func (l *lexer) ignore() {
l.start = l.pos
}

// ignoreSpace consumes and discards a contiguous run of space
// characters, leaving the lexer positioned at the first non-space rune.
func (l *lexer) ignoreSpace() {
	r := l.next()
	for isSpace(r) {
		l.ignore()
		r = l.next()
	}
	// Un-consume the rune that ended the run.
	l.backup()
}

// expect the next rune to be r
func (l *lexer) expect(r rune) bool {
if l.peek() == r {
Expand All @@ -279,9 +294,6 @@ func lexToken(l *lexer) stateFn {
case r == '\'':
l.backup()
return lexSingleOrTripleString
case r == '/':
l.backup()
return lexRegex
case isSpace(r):
l.ignore()
case r == '(':
Expand Down Expand Up @@ -333,6 +345,12 @@ func lexOperator(l *lexer) stateFn {
}
op := strToOperator[l.current()]
l.emit(op)
if op == tokenRegexNotEqual {
l.ignoreSpace()
if l.peek() == '/' {
return lexRegex
}
}
return lexToken
case '>', '<':
if l.peek() == '=' {
Expand All @@ -346,8 +364,18 @@ func lexOperator(l *lexer) stateFn {
l.next()
op := strToOperator[l.current()]
l.emit(op)
if op == tokenRegexEqual {
l.ignoreSpace()
if l.peek() == '/' {
return lexRegex
}
}
} else {
l.emit(tokenAsgn)
l.ignoreSpace()
if l.peek() == '/' {
return lexRegex
}
}
return lexToken
}
Expand Down
41 changes: 41 additions & 0 deletions tick/lex_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,13 @@ func TestLexer(t *testing.T) {

cases := []testCase{
//Symbols + Operators
{
in: "!",
tokens: []token{
token{tokenNot, 0, "!"},
token{tokenEOF, 1, ""},
},
},
{
in: "+",
tokens: []token{
Expand Down Expand Up @@ -370,6 +377,40 @@ func TestLexer(t *testing.T) {
token{tokenEOF, 9, ""},
},
},
// Regex -- can only be lexed within context
{
in: `=~ //`,
tokens: []token{
token{tokenRegexEqual, 0, "=~"},
token{tokenRegex, 3, "//"},
token{tokenEOF, 5, ""},
},
},
{
in: `!~ //`,
tokens: []token{
token{tokenRegexNotEqual, 0, "!~"},
token{tokenRegex, 3, "//"},
token{tokenEOF, 5, ""},
},
},
{
in: `= //`,
tokens: []token{
token{tokenAsgn, 0, "="},
token{tokenRegex, 2, "//"},
token{tokenEOF, 4, ""},
},
},
{
in: `= /^((.*)[a-z]+\S{0,2})|cat\/\/$/`,
tokens: []token{
token{tokenAsgn, 0, "="},
token{tokenRegex, 2, `/^((.*)[a-z]+\S{0,2})|cat\/\/$/`},
token{tokenEOF, 33, ""},
},
},

//Space
{
in: " ",
Expand Down
Loading

0 comments on commit 87b5d2a

Please sign in to comment.