enable using TICKscript vars inside of lambda expressions
nathanielc committed Dec 22, 2015
1 parent 7ea4a54 commit 1cedd20
Showing 8 changed files with 379 additions and 11 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,14 @@
# Changelog

## v0.2.4 [unreleased]

### Release Notes

### Features
- [#107](https://github.com/influxdb/kapacitor/issues/107): Enable TICKscript variables to be defined and then referenced from lambda expressions.

### Bugfixes

## v0.2.3 [2015-12-22]

### Release Notes
10 changes: 7 additions & 3 deletions integrations/streamer_test.go
@@ -1070,6 +1070,10 @@ func TestStream_Alert(t *testing.T) {
defer ts.Close()

var script = `
var infoThreshold = 6.0
var warnThreshold = 7.0
var critThreshold = 8.0
stream
.from().measurement('cpu')
.where(lambda: "host" == 'serverA')
@@ -1080,9 +1084,9 @@ stream
.mapReduce(influxql.count('idle'))
.alert()
.id('kapacitor/{{ .Name }}/{{ index .Tags "host" }}')
.info(lambda: "count" > 6.0)
.warn(lambda: "count" > 7.0)
.crit(lambda: "count" > 8.0)
.info(lambda: "count" > infoThreshold)
.warn(lambda: "count" > warnThreshold)
.crit(lambda: "count" > critThreshold)
.post('` + ts.URL + `')
`

5 changes: 3 additions & 2 deletions tick/TICKscript.md
@@ -52,6 +52,7 @@ duration_lit = int_lit duration_unit .
duration_unit = "u" | "µ" | "ms" | "s" | "m" | "h" | "d" | "w" .
string_lit = `'` { unicode_char } `'` .
star_lit = "*"
regex_lit = `/` { unicode_char } `/` .
operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" |
"<" | "<=" | ">" | ">=" | "=~" | "!~" |
@@ -60,14 +61,14 @@ operator_lit = "+" | "-" | "*" | "/" | "==" | "!=" |
Program = Statement { Statement } .
Statement = Declaration | Expression .
Declaration = "var" identifier "=" Expression .
Expression = identifier { Chain } | Function { Chain } .
Expression = identifier { Chain } | Function { Chain } | Primary .
Chain = "." Function { Chain} | "." identifier { Chain } .
Function = identifier "(" Parameters ")" .
Parameters = { Parameter "," } [ Parameter ] .
Parameter = Expression | "lambda:" LambdaExpr | Primary .
Primary = "(" LambdaExpr ")" | number_lit | string_lit |
boolean_lit | duration_lit | regex_lit | star_lit |
LFunc | Reference | "-" Primary | "!" Primary .
LFunc | identifier | Reference | "-" Primary | "!" Primary .
Reference = `"` { unicode_char } `"` .
LambdaExpr = Primary operator_lit Primary .
LFunc = identifier "(" LParameters ")"
84 changes: 84 additions & 0 deletions tick/eval.go
@@ -4,8 +4,10 @@ package tick
import (
"fmt"
"reflect"
"regexp"
"runtime"
"strings"
"time"
"unicode"
"unicode/utf8"
)
@@ -60,6 +62,20 @@ func eval(n Node, scope *Scope, stck *stack) (err error) {
}
evalUnary(node.Operator, scope, stck)
case *LambdaNode:
// Catch panic from resolveIdents and return as error.
err = func() (e error) {
defer func(ep *error) {
err := recover()
if err != nil {
*ep = err.(error)
}
}(&e)
node.Node = resolveIdents(node.Node, scope)
return e
}()
if err != nil {
return
}
stck.Push(node.Node)
case *BinaryNode:
err = eval(node.Left, scope, stck)
@@ -260,3 +276,71 @@ func capilatizeFirst(s string) string {
s = string(unicode.ToUpper(r)) + s[n:]
return s
}

// Resolve all identifiers immediately in the tree with their value from the scope.
// This operation is performed in place.
// Panics if the scope value does not exist or if the value cannot be expressed as a literal.
func resolveIdents(n Node, scope *Scope) Node {
switch node := n.(type) {
case *IdentifierNode:
v, err := scope.Get(node.Ident)
if err != nil {
panic(err)
}
return valueToLiteralNode(node.pos, v)
case *UnaryNode:
node.Node = resolveIdents(node.Node, scope)
case *BinaryNode:
node.Left = resolveIdents(node.Left, scope)
node.Right = resolveIdents(node.Right, scope)
case *FunctionNode:
for i, arg := range node.Args {
node.Args[i] = resolveIdents(arg, scope)
}
case *ListNode:
for i, n := range node.Nodes {
node.Nodes[i] = resolveIdents(n, scope)
}
}
return n
}

// Convert raw value to literal node, for all supported basic types.
func valueToLiteralNode(pos pos, v interface{}) Node {
switch value := v.(type) {
case bool:
return &BoolNode{
pos: pos,
Bool: value,
}
case int64:
return &NumberNode{
pos: pos,
IsInt: true,
Int64: value,
}
case float64:
return &NumberNode{
pos: pos,
IsFloat: true,
Float64: value,
}
case time.Duration:
return &DurationNode{
pos: pos,
Dur: value,
}
case string:
return &StringNode{
pos: pos,
Literal: value,
}
case *regexp.Regexp:
return &RegexNode{
pos: pos,
Regex: value,
}
default:
panic(fmt.Errorf("unsupported literal type %T", v))
}
}
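
For context: a minimal, self-contained sketch of the substitution idea above, using toy stand-in types rather than the real tick AST and Scope (Expr, Ident, Ref, Num, Binary, and the plain map used as a scope are all illustrative). It walks the expression tree, replaces each identifier with a literal looked up in the scope, and converts the panic for a missing or unsupported value into an ordinary error, the same trick the *LambdaNode case uses with its deferred recover.

```go
package main

import "fmt"

// Toy expression tree; the real tick AST is richer. These types exist only
// to illustrate the substitution step.
type Expr interface{}

type Ident struct{ Name string } // a TICKscript variable reference
type Ref struct{ Field string }  // a field reference like "count"; left untouched
type Num struct{ Value float64 } // a numeric literal
type Binary struct {             // e.g. "count" > warnThreshold
	Op          string
	Left, Right Expr
}

// resolve walks the tree in place and replaces every identifier with a
// literal taken from scope. It panics if a variable is undefined or its
// value cannot be expressed as a literal.
func resolve(e Expr, scope map[string]interface{}) Expr {
	switch n := e.(type) {
	case *Ident:
		v, ok := scope[n.Name]
		if !ok {
			panic(fmt.Errorf("undefined variable %q", n.Name))
		}
		f, ok := v.(float64)
		if !ok {
			panic(fmt.Errorf("unsupported literal type %T", v))
		}
		return &Num{Value: f}
	case *Binary:
		n.Left = resolve(n.Left, scope)
		n.Right = resolve(n.Right, scope)
	}
	return e
}

// resolveErr converts the panic used inside resolve into an ordinary error.
func resolveErr(e Expr, scope map[string]interface{}) (out Expr, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = r.(error)
		}
	}()
	return resolve(e, scope), nil
}

func main() {
	// lambda: "count" > warnThreshold
	expr := &Binary{Op: ">", Left: &Ref{Field: "count"}, Right: &Ident{Name: "warnThreshold"}}
	resolved, err := resolveErr(expr, map[string]interface{}{"warnThreshold": 7.0})
	fmt.Println(err)                                    // <nil>
	fmt.Println(resolved.(*Binary).Right.(*Num).Value) // 7
}
```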
1 change: 1 addition & 0 deletions tick/lex.go
@@ -70,6 +70,7 @@
)

var operatorStr = [...]string{
tokenNot: "!",
tokenPlus: "+",
tokenMinus: "-",
tokenMult: "*",
7 changes: 7 additions & 0 deletions tick/lex_test.go
@@ -33,6 +33,13 @@ func TestLexer(t *testing.T) {

cases := []testCase{
//Symbols + Operators
{
in: "!",
tokens: []token{
token{tokenNot, 0, "!"},
token{tokenEOF, 1, ""},
},
},
{
in: "+",
tokens: []token{
26 changes: 23 additions & 3 deletions tick/parser.go
@@ -87,10 +87,18 @@ func (p *parser) unexpected(tok token, expected ...tokenType) {
if start < 0 {
start = 0
}
// Skip any newlines; just show a single line
if i := strings.LastIndexByte(p.Text[start:tok.pos], '\n'); i != -1 {
start = start + i + 1
}
stop := tok.pos + bufSize
if stop > len(p.Text) {
stop = len(p.Text)
}
// Skip any newlines; just show a single line
if i := strings.IndexByte(p.Text[tok.pos:stop], '\n'); i != -1 {
stop = tok.pos + i
}
line, char := p.lex.lineNumber(tok.pos)
expectedStrs := make([]string, len(expected))
for i := range expected {
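
The change above clamps the quoted context around the unexpected token so the error message never spans more than the line containing it. A small standalone sketch of that clamping (the name singleLineContext and its signature are made up for illustration):

```go
package main

import (
	"fmt"
	"strings"
)

// singleLineContext returns a window of text around pos, clipped so that it
// never extends past the line containing pos.
func singleLineContext(text string, pos, bufSize int) string {
	start := pos - bufSize
	if start < 0 {
		start = 0
	}
	// Drop everything before the last newline preceding pos.
	if i := strings.LastIndexByte(text[start:pos], '\n'); i != -1 {
		start = start + i + 1
	}
	stop := pos + bufSize
	if stop > len(text) {
		stop = len(text)
	}
	// Drop everything after the first newline following pos.
	if i := strings.IndexByte(text[pos:stop], '\n'); i != -1 {
		stop = pos + i
	}
	return text[start:stop]
}

func main() {
	script := "var x = 1\nstream.from()\nvar y = 2\n"
	fmt.Printf("%q\n", singleLineContext(script, 12, 40)) // "stream.from()"
}
```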
@@ -187,8 +195,13 @@ func (p *parser) vr() Node {

//parse an expression
func (p *parser) expression() Node {
term := p.funcOrIdent()
return p.chain(term)
switch p.peek().typ {
case tokenIdent:
term := p.funcOrIdent()
return p.chain(term)
default:
return p.primary()
}
}

//parse a function or identifier invocation chain
@@ -356,8 +369,15 @@ func (p *parser) primary() Node {
case tok.typ == tokenReference:
return p.reference()
case tok.typ == tokenIdent:
return p.lfunction()
p.next()
if p.peek().typ == tokenLParen {
p.backup()
return p.lfunction()
}
p.backup()
return p.identifier()
case tok.typ == tokenMinus, tok.typ == tokenNot:
p.next()
return newUnary(tok, p.primary())
default:
p.unexpected(
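
The new tokenIdent branch in primary() relies on one token of lookahead: after consuming an identifier, a following '(' means a lambda function call (lfunction), otherwise the identifier is a bare variable reference that resolveIdents will later replace with a literal. A minimal sketch of that next/peek/backup pattern over a toy token stream (the parser type and helpers below are simplified stand-ins, not the real lexer or parser API):

```go
package main

import "fmt"

type tokenType int

const (
	tokenIdent tokenType = iota
	tokenLParen
	tokenNumber
	tokenEOF
)

type token struct {
	typ tokenType
	val string
}

// toy parser with a one-token pushback buffer.
type parser struct {
	tokens []token
	pos    int
}

func (p *parser) next() token {
	t := p.tokens[p.pos]
	p.pos++
	return t
}
func (p *parser) peek() token { return p.tokens[p.pos] }
func (p *parser) backup()     { p.pos-- }

// classifyIdent consumes an identifier and reports whether it starts a
// function call or is a bare variable reference.
func (p *parser) classifyIdent() string {
	p.next() // consume the identifier
	if p.peek().typ == tokenLParen {
		p.backup() // let the function-call rule re-read the identifier
		return "function call"
	}
	p.backup()
	return "variable reference"
}

func main() {
	call := &parser{tokens: []token{{tokenIdent, "sigma"}, {tokenLParen, "("}, {tokenEOF, ""}}}
	bare := &parser{tokens: []token{{tokenIdent, "warnThreshold"}, {tokenNumber, "7"}, {tokenEOF, ""}}}
	fmt.Println(call.classifyIdent()) // function call
	fmt.Println(bare.classifyIdent()) // variable reference
}
```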