// Package expr is a tiny expression grammar used by the filter bar.
// Grammar:
//
// expr := or
// or := and ('or' and)*
// and := cmp ('and' cmp)*
// cmp := primary (op primary)?
// primary := IDENT | NUMBER | STRING | '(' expr ')' | 'not' primary
// op := '=' | '!=' | '<' | '<=' | '>' | '>=' | '~'
//
package expr
import (
"fmt"
"strconv"
"strings"
"unicode"
)
// Node is the parsed expression tree.
//
// Kind identifies the node type: interior nodes are "and", "or", "not"
// (operands in Children) and "cmp" (Children[0] Op Children[1]); leaves
// are "ident" and "string" (payload in Str) and "number" (payload in Num).
type Node struct {
	Kind     string  // "and", "or", "not", "cmp", "ident", "string", "number"
	Op       string  // comparison op for "cmp"
	Children []*Node // operands for "and", "or", "not" and "cmp" nodes
	Str      string  // payload for "ident" and "string" nodes
	Num      float64 // payload for "number" nodes
}
// Parse returns the tree for source or an error.
func Parse(source string) (*Node, error) {
	p := &parser{toks: tokenize(source)}
	root, err := p.parseOr()
	if err != nil {
		return nil, err
	}
	// A well-formed expression consumes the whole token stream; anything
	// left over means the input was malformed past a valid prefix.
	if p.pos < len(p.toks) {
		return nil, fmt.Errorf("expr: unexpected trailing input %q", p.peek().raw)
	}
	return root, nil
}
// parser walks a token slice produced by tokenize, tracking a cursor.
type parser struct {
	toks []token // full token stream for the input
	pos  int     // index of the next unconsumed token
}
// peek returns the next token without consuming it; past the end of
// the stream it returns a synthetic EOF token.
func (p *parser) peek() token {
	if p.pos < len(p.toks) {
		return p.toks[p.pos]
	}
	return token{kind: tokEOF}
}
func (p *parser) next() token { t := p.peek(); p.pos++; return t }
// parseOr parses: or := and ('or' and)*. The operator is
// left-associative, so "a or b or c" nests as ((a or b) or c).
func (p *parser) parseOr() (*Node, error) {
	node, err := p.parseAnd()
	if err != nil {
		return nil, err
	}
	for {
		if t := p.peek(); t.kind != tokIdent || t.raw != "or" {
			return node, nil
		}
		p.next() // consume the 'or' keyword
		rhs, err := p.parseAnd()
		if err != nil {
			return nil, err
		}
		node = &Node{Kind: "or", Children: []*Node{node, rhs}}
	}
}
// parseAnd parses: and := cmp ('and' cmp)*. The operator is
// left-associative and binds tighter than 'or'.
func (p *parser) parseAnd() (*Node, error) {
	node, err := p.parseCmp()
	if err != nil {
		return nil, err
	}
	for {
		if t := p.peek(); t.kind != tokIdent || t.raw != "and" {
			return node, nil
		}
		p.next() // consume the 'and' keyword
		rhs, err := p.parseCmp()
		if err != nil {
			return nil, err
		}
		node = &Node{Kind: "and", Children: []*Node{node, rhs}}
	}
}
// parseCmp parses either a 'not'-prefixed primary or a primary with an
// optional comparison: cmp := 'not' primary | primary (op primary)?.
func (p *parser) parseCmp() (*Node, error) {
	if t := p.peek(); t.kind == tokIdent && t.raw == "not" {
		p.next() // consume the 'not' keyword
		operand, err := p.parsePrimary()
		if err != nil {
			return nil, err
		}
		return &Node{Kind: "not", Children: []*Node{operand}}, nil
	}
	lhs, err := p.parsePrimary()
	if err != nil {
		return nil, err
	}
	// A bare primary with no operator after it is a valid comparison-less
	// expression; return it untouched.
	if p.peek().kind != tokOp {
		return lhs, nil
	}
	op := p.next().raw
	rhs, err := p.parsePrimary()
	if err != nil {
		return nil, err
	}
	return &Node{Kind: "cmp", Op: op, Children: []*Node{lhs, rhs}}, nil
}
// parsePrimary parses one operand:
//
//	primary := IDENT | NUMBER | STRING | '(' expr ')' | 'not' primary
//
// It consumes the leading token unconditionally and returns an error on
// anything that cannot start a primary (including EOF).
func (p *parser) parsePrimary() (*Node, error) {
	t := p.next()
	switch t.kind {
	case tokIdent:
		// The documented grammar includes 'not' primary here; without
		// this case "not" was treated as a plain identifier, so nested
		// negation such as "not not a" failed to parse. A trailing bare
		// "not" is rejected by the recursive call.
		if t.raw == "not" {
			child, err := p.parsePrimary()
			if err != nil {
				return nil, err
			}
			return &Node{Kind: "not", Children: []*Node{child}}, nil
		}
		return &Node{Kind: "ident", Str: t.raw}, nil
	case tokString:
		return &Node{Kind: "string", Str: t.raw}, nil
	case tokNumber:
		v, err := strconv.ParseFloat(t.raw, 64)
		if err != nil {
			return nil, err
		}
		return &Node{Kind: "number", Num: v}, nil
	case tokLParen:
		// Parenthesized sub-expression: restart at the lowest-precedence
		// production and require the matching close paren.
		n, err := p.parseOr()
		if err != nil {
			return nil, err
		}
		if p.next().kind != tokRParen {
			return nil, fmt.Errorf("expr: missing )")
		}
		return n, nil
	default:
		return nil, fmt.Errorf("expr: unexpected token %q", t.raw)
	}
}
// isLetter reports whether r may begin an identifier: any Unicode
// letter plus '_', ':' and '-'. It is unexported but package-visible,
// so eval.go in the same package can reuse it.
func isLetter(r rune) bool {
	switch r {
	case '_', ':', '-':
		return true
	}
	return unicode.IsLetter(r)
}
// trimQuotes strips one matching pair of single or double quotes,
// preserving interior whitespace; any other input is returned with
// surrounding whitespace trimmed.
func trimQuotes(s string) string {
	if len(s) >= 2 {
		if q := s[0]; (q == '"' || q == '\'') && s[len(s)-1] == q {
			return s[1 : len(s)-1]
		}
	}
	return strings.TrimSpace(s)
}