mirror of
https://gitea.com/gitea/act_runner.git
synced 2026-03-22 06:45:03 +01:00
Replace expressions engine (#133)
This commit is contained in:
27
internal/expr/expression_parse_test.go
Normal file
27
internal/expr/expression_parse_test.go
Normal file
@@ -0,0 +1,27 @@
|
||||
package workflow
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestExpressionParser(t *testing.T) {
|
||||
node, err := Parse("github.event_name")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
t.Logf("Parsed expression: %+v", node)
|
||||
}
|
||||
|
||||
func TestExpressionParserWildcard(t *testing.T) {
|
||||
node, err := Parse("github.commits.*.message")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
t.Logf("Parsed expression: %+v", node)
|
||||
}
|
||||
|
||||
func TestExpressionParserDot(t *testing.T) {
|
||||
node, err := Parse("github.head_commit.message")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
t.Logf("Parsed expression: %+v", node)
|
||||
}
|
||||
306
internal/expr/expression_parser.go
Normal file
306
internal/expr/expression_parser.go
Normal file
@@ -0,0 +1,306 @@
|
||||
package workflow
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Node represents a node in the expression tree.
// It is intentionally minimal – only the String method is required.
// Users can extend implementations with more information if required.
type Node interface {
	String() string
}

// ValueNode represents a literal value (number, string, boolean, null)
// or a named value / property name / wildcard segment.
// Kind records the token kind the node was built from. For literals the
// parser stores the token's parsed Go value in Value; for named
// values/properties/wildcards it stores the raw identifier text.
type ValueNode struct {
	Kind TokenKind
	Value interface{}
}

// FunctionNode represents a function call with its argument list,
// built when a parameter list is closed.
type FunctionNode struct {
	Name string
	Args []Node
}

// BinaryNode represents a binary operator application. Op is the raw
// operator text; the parser also uses "." for dereference and "[" for
// indexing.
type BinaryNode struct {
	Op string
	Left Node
	Right Node
}

// UnaryNode represents a unary operator application (currently only "!").
type UnaryNode struct {
	Op string
	Operand Node
}
|
||||
|
||||
// Parser holds the token buffer and the two stacks used by the
// shunting‑yard algorithm: ops for pending operators/markers and vals
// for already-reduced nodes.
type Parser struct {
	lexer *Lexer
	tokens []Token
	pos int
	ops []OpToken
	vals []Node
}

// OpToken is an entry on the operator stack. For function tokens,
// StartPos remembers the height of the value stack when the function was
// seen, so pushFuncValue can later collect exactly the values pushed
// after it as arguments.
type OpToken struct {
	Token
	StartPos int
}
|
||||
|
||||
func precedence(tkn Token) int {
|
||||
switch tkn.Kind {
|
||||
case TokenKindStartGroup:
|
||||
return 20
|
||||
case TokenKindStartIndex, TokenKindStartParameters, TokenKindDereference:
|
||||
return 19
|
||||
case TokenKindLogicalOperator:
|
||||
switch tkn.Raw {
|
||||
case "!":
|
||||
return 16
|
||||
case ">", ">=", "<", "<=":
|
||||
return 11
|
||||
case "==", "!=":
|
||||
return 10
|
||||
case "&&":
|
||||
return 6
|
||||
case "||":
|
||||
return 5
|
||||
}
|
||||
case TokenKindEndGroup, TokenKindEndIndex, TokenKindEndParameters, TokenKindSeparator:
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// Parse parses the expression and returns the root node.
|
||||
func Parse(expression string) (Node, error) {
|
||||
lexer := NewLexer(expression, 0)
|
||||
p := &Parser{}
|
||||
// Tokenise all tokens
|
||||
if err := p.initWithLexer(lexer); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return p.parse()
|
||||
}
|
||||
|
||||
// parse runs a shunting‑yard reduction over the token buffer and returns
// the single root node, or an error if the expression does not reduce to
// exactly one value.
func (p *Parser) parse() (Node, error) {
	// Shunting‑yard algorithm
	for p.pos < len(p.tokens) {
		tok := p.tokens[p.pos]
		p.pos++
		switch tok.Kind {
		case TokenKindNumber, TokenKindString, TokenKindBoolean, TokenKindNull:
			// Literals carry their parsed Go value.
			p.pushValue(&ValueNode{Kind: tok.Kind, Value: tok.Value})
		case TokenKindNamedValue, TokenKindPropertyName, TokenKindWildcard:
			// Identifiers keep their raw text as the value.
			p.pushValue(&ValueNode{Kind: tok.Kind, Value: tok.Raw})
		case TokenKindFunction:
			// Remember the value-stack height so pushFuncValue can later
			// collect exactly the values pushed after this point.
			p.pushFunc(tok, len(p.vals))
		case TokenKindStartParameters, TokenKindStartGroup, TokenKindStartIndex, TokenKindLogicalOperator, TokenKindDereference:
			if err := p.pushOp(tok); err != nil {
				return nil, err
			}
		case TokenKindSeparator:
			// ',' finishes one argument: reduce back to the '(' marker
			// but keep the marker on the stack.
			if err := p.popGroup(TokenKindStartParameters); err != nil {
				return nil, err
			}
		case TokenKindEndParameters:
			if err := p.pushFuncValue(); err != nil {
				return nil, err
			}
		case TokenKindEndGroup:
			if err := p.popGroup(TokenKindStartGroup); err != nil {
				return nil, err
			}

			// Discard the start-group marker.
			p.ops = p.ops[:len(p.ops)-1]
		case TokenKindEndIndex:
			if err := p.popGroup(TokenKindStartIndex); err != nil {
				return nil, err
			}

			// Discard the start-index marker.
			p.ops = p.ops[:len(p.ops)-1]
			// Combine the container and index expressions into an
			// indexing node (Op "[").
			right := p.vals[len(p.vals)-1]
			p.vals = p.vals[:len(p.vals)-1]
			left := p.vals[len(p.vals)-1]
			p.vals = p.vals[:len(p.vals)-1]
			p.vals = append(p.vals, &BinaryNode{Op: "[", Left: left, Right: right})
		}
	}
	// Reduce any remaining operators.
	for len(p.ops) > 0 {
		if err := p.popOp(); err != nil {
			return nil, err
		}
	}
	// Exactly one value must remain: the root of the tree.
	if len(p.vals) != 1 {
		return nil, errors.New("invalid expression")
	}
	return p.vals[0], nil
}
|
||||
|
||||
func (p *Parser) pushFuncValue() error {
|
||||
if err := p.popGroup(TokenKindStartParameters); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// pop the start parameters
|
||||
p.ops = p.ops[:len(p.ops)-1]
|
||||
// create function node
|
||||
fnTok := p.ops[len(p.ops)-1]
|
||||
if fnTok.Kind != TokenKindFunction {
|
||||
return errors.New("expected function token")
|
||||
}
|
||||
p.ops = p.ops[:len(p.ops)-1]
|
||||
// collect arguments
|
||||
args := []Node{}
|
||||
for len(p.vals) > fnTok.StartPos {
|
||||
args = append([]Node{p.vals[len(p.vals)-1]}, args...)
|
||||
p.vals = p.vals[:len(p.vals)-1]
|
||||
}
|
||||
p.pushValue(&FunctionNode{Name: fnTok.Raw, Args: args})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) initWithLexer(lexer *Lexer) error {
|
||||
p.lexer = lexer
|
||||
for {
|
||||
tok := lexer.Next()
|
||||
if tok == nil {
|
||||
break
|
||||
}
|
||||
if tok.Kind == TokenKindUnexpected {
|
||||
return fmt.Errorf("unexpected token %s at position %d", tok.Raw, tok.Index)
|
||||
}
|
||||
p.tokens = append(p.tokens, *tok)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) popGroup(kind TokenKind) error {
|
||||
for len(p.ops) > 0 && p.ops[len(p.ops)-1].Kind != kind {
|
||||
if err := p.popOp(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if len(p.ops) == 0 {
|
||||
return errors.New("mismatched parentheses")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) pushValue(v Node) {
|
||||
p.vals = append(p.vals, v)
|
||||
}
|
||||
|
||||
func (p *Parser) pushOp(t Token) error {
|
||||
for len(p.ops) > 0 {
|
||||
top := p.ops[len(p.ops)-1]
|
||||
if precedence(top.Token) >= precedence(t) &&
|
||||
top.Kind != TokenKindStartGroup &&
|
||||
top.Kind != TokenKindStartIndex &&
|
||||
top.Kind != TokenKindStartParameters &&
|
||||
top.Kind != TokenKindSeparator {
|
||||
if err := p.popOp(); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
p.ops = append(p.ops, OpToken{Token: t})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) pushFunc(t Token, start int) {
|
||||
p.ops = append(p.ops, OpToken{Token: t, StartPos: start})
|
||||
}
|
||||
|
||||
// popOp removes the top operator from the operator stack and reduces it
// against the value stack. Popping an empty stack is a no-op; start
// markers and function tokens are reduced elsewhere.
func (p *Parser) popOp() error {
	if len(p.ops) == 0 {
		return nil
	}
	op := p.ops[len(p.ops)-1]
	p.ops = p.ops[:len(p.ops)-1]
	switch op.Kind {
	case TokenKindLogicalOperator:
		if op.Raw == "!" {
			// Unary negation consumes a single operand.
			if len(p.vals) < 1 {
				return errors.New("insufficient operands")
			}
			right := p.vals[len(p.vals)-1]
			p.vals = p.vals[:len(p.vals)-1]
			p.vals = append(p.vals, &UnaryNode{Op: op.Raw, Operand: right})
		} else {
			// All other logical operators are binary.
			if len(p.vals) < 2 {
				return errors.New("insufficient operands")
			}
			right := p.vals[len(p.vals)-1]
			left := p.vals[len(p.vals)-2]
			p.vals = p.vals[:len(p.vals)-2]
			p.vals = append(p.vals, &BinaryNode{Op: op.Raw, Left: left, Right: right})
		}
	case TokenKindStartParameters:
		// Parameter lists are reduced in pushFuncValue, not here.
	case TokenKindDereference:
		// "a.b" becomes a binary "." node.
		if len(p.vals) < 2 {
			return errors.New("insufficient operands")
		}
		right := p.vals[len(p.vals)-1]
		left := p.vals[len(p.vals)-2]
		p.vals = p.vals[:len(p.vals)-2]
		p.vals = append(p.vals, &BinaryNode{Op: ".", Left: left, Right: right})
	}
	return nil
}
|
||||
|
||||
// String returns a string representation of the node.
|
||||
func (n *ValueNode) String() string { return fmt.Sprintf("%v", n.Value) }
|
||||
|
||||
// String returns a string representation of the node.
|
||||
func (n *FunctionNode) String() string {
|
||||
return fmt.Sprintf("%s(%s)", n.Name, strings.Join(funcArgs(n.Args), ", "))
|
||||
}
|
||||
|
||||
func funcArgs(args []Node) []string {
|
||||
res := []string{}
|
||||
for _, a := range args {
|
||||
res = append(res, a.String())
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// String returns a string representation of the node.
|
||||
func (n *BinaryNode) String() string {
|
||||
return fmt.Sprintf("(%s %s %s)", n.Left.String(), n.Op, n.Right.String())
|
||||
}
|
||||
|
||||
// String returns a string representation of the node.
|
||||
func (n *UnaryNode) String() string { return fmt.Sprintf("(%s%s)", n.Op, n.Operand.String()) }
|
||||
|
||||
func VisitNode(exprNode Node, callback func(node Node)) {
|
||||
callback(exprNode)
|
||||
switch node := exprNode.(type) {
|
||||
case *FunctionNode:
|
||||
for _, arg := range node.Args {
|
||||
VisitNode(arg, callback)
|
||||
}
|
||||
case *UnaryNode:
|
||||
VisitNode(node.Operand, callback)
|
||||
case *BinaryNode:
|
||||
VisitNode(node.Left, callback)
|
||||
VisitNode(node.Right, callback)
|
||||
}
|
||||
}
|
||||
361
internal/expr/lexer.go
Normal file
361
internal/expr/lexer.go
Normal file
@@ -0,0 +1,361 @@
|
||||
package workflow
|
||||
|
||||
import (
|
||||
"math"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// TokenKind represents the type of token returned by the lexer.
// The values mirror the C# TokenKind enum; the names are kept identical
// to the C# implementation for easier mapping when porting the parser.
//
// The lexer is intentionally simple – it only tokenises the subset of
// expressions that are used in GitHub Actions workflow `if:` expressions.
// It does not evaluate the expression – that is left to the parser.
type TokenKind int

const (
	TokenKindStartGroup TokenKind = iota // '(' opening a logical group
	TokenKindStartIndex // '['
	TokenKindEndGroup // ')' closing a group
	TokenKindEndIndex // ']'
	TokenKindSeparator // ',' between function arguments
	TokenKindDereference // '.' property access
	TokenKindWildcard // '*'
	TokenKindLogicalOperator // !, ==, !=, <, <=, >, >=, &&, ||
	TokenKindNumber
	TokenKindString
	TokenKindBoolean
	TokenKindNull
	TokenKindPropertyName // identifier following a '.'
	TokenKindFunction // identifier directly followed by '('
	TokenKindNamedValue // free-standing identifier (e.g. "github")
	TokenKindStartParameters // '(' opening a function's argument list
	TokenKindEndParameters // ')' closing a function's argument list
	TokenKindUnexpected // lexing error marker
)
|
||||
|
||||
// Token represents a single lexical token.
// Raw holds the original text, Value holds the parsed value when
// applicable (numbers, booleans, unescaped strings).
// Index is the start position in the source string.
//
// The struct is intentionally minimal – it only contains what the parser
// needs. If you need more information (e.g. token length) you can add it.
type Token struct {
	Kind TokenKind
	Raw string
	Value interface{}
	Index int
}
|
||||
|
||||
// Lexer holds the state while tokenising an expression.
// It is a direct port of the C# LexicalAnalyzer.
//
// Flags can be used to enable/disable features – for now we only support
// a single flag (FlagV1) that mirrors ExpressionFlags.DTExpressionsV1.
//
// The lexer is not thread‑safe – reuse a single instance per expression.
type Lexer struct {
	expr string // the expression being tokenised
	flags int // feature bitmask; see FlagV1
	index int // current byte offset into expr
	last *Token // most recently emitted legal token (nil at start)
	stack []TokenKind // unclosed start tokens
}
|
||||
|
||||
// NewLexer creates a new lexer for the given expression.
|
||||
func NewLexer(expr string, flags int) *Lexer {
|
||||
return &Lexer{expr: expr, flags: flags}
|
||||
}
|
||||
|
||||
// testTokenBoundary reports whether c terminates the token currently
// being read: any structural or operator character, or whitespace.
func testTokenBoundary(c rune) bool {
	if unicode.IsSpace(c) {
		return true
	}
	return strings.ContainsRune("([)],.!><=&|", c)
}
|
||||
|
||||
// Next returns the next token or nil if the end of the expression is reached.
// Invalid input is reported as a TokenKindUnexpected token rather than an
// error; callers abort on the first such token.
func (l *Lexer) Next() *Token {
	// Skip whitespace
	for l.index < len(l.expr) && unicode.IsSpace(rune(l.expr[l.index])) {
		l.index++
	}
	if l.index >= len(l.expr) {
		return nil
	}

	c := l.expr[l.index]
	switch c {
	case '(':
		l.index++
		// Function call or logical grouping
		if l.last != nil && l.last.Kind == TokenKindFunction {
			return l.createToken(TokenKindStartParameters, "(")
		}
		if l.flags&FlagV1 != 0 {
			// V1 does not support grouping – treat as unexpected
			return l.createToken(TokenKindUnexpected, "(")
		}
		return l.createToken(TokenKindStartGroup, "(")
	case '[':
		l.index++
		return l.createToken(TokenKindStartIndex, "[")
	case ')':
		l.index++
		// The unclosed-token stack decides whether this ')' closes a
		// function parameter list or a logical group.
		if len(l.stack) > 0 && l.stack[len(l.stack)-1] == TokenKindStartParameters {
			return l.createToken(TokenKindEndParameters, ")")
		}
		return l.createToken(TokenKindEndGroup, ")")
	case ']':
		l.index++
		return l.createToken(TokenKindEndIndex, "]")
	case ',':
		l.index++
		return l.createToken(TokenKindSeparator, ",")
	case '*':
		l.index++
		return l.createToken(TokenKindWildcard, "*")
	case '\'':
		return l.readString()
	case '!', '>', '<', '=', '&', '|':
		if l.flags&FlagV1 != 0 {
			// V1 has no logical operators.
			l.index++
			return l.createToken(TokenKindUnexpected, string(c))
		}
		return l.readOperator()
	default:
		// Numbers, keywords, identifiers and '.' need look-behind.
		return l.defaultNext(c)
	}
}
|
||||
|
||||
// defaultNext handles the characters that need look-behind: '.' (decimal
// point vs dereference), signed digits, and keywords/identifiers.
func (l *Lexer) defaultNext(c byte) *Token {
	if c == '.' {
		// Could be number or dereference: after a separator, an opening
		// token or an operator a '.' can only start a number like ".5";
		// after a value it is a property dereference.
		if l.last == nil || l.last.Kind == TokenKindSeparator || l.last.Kind == TokenKindStartGroup || l.last.Kind == TokenKindStartIndex || l.last.Kind == TokenKindStartParameters || l.last.Kind == TokenKindLogicalOperator {
			return l.readNumber()
		}
		l.index++
		return l.createToken(TokenKindDereference, ".")
	}
	// Signed or unsigned numeric literal.
	if c == '-' || c == '+' || unicode.IsDigit(rune(c)) {
		return l.readNumber()
	}
	return l.readKeyword()
}
|
||||
|
||||
// Helper to create a token and update lexer state.
|
||||
func (l *Lexer) createToken(kind TokenKind, raw string) *Token {
|
||||
// Token order check
|
||||
if !l.checkLastToken(kind, raw) {
|
||||
// Illegal token sequence
|
||||
return &Token{Kind: TokenKindUnexpected, Raw: raw, Index: l.index}
|
||||
}
|
||||
tok := &Token{Kind: kind, Raw: raw, Index: l.index}
|
||||
l.last = tok
|
||||
// Manage stack for grouping
|
||||
switch kind {
|
||||
case TokenKindStartGroup, TokenKindStartIndex, TokenKindStartParameters:
|
||||
l.stack = append(l.stack, kind)
|
||||
case TokenKindEndGroup, TokenKindEndIndex, TokenKindEndParameters:
|
||||
if len(l.stack) > 0 {
|
||||
l.stack = l.stack[:len(l.stack)-1]
|
||||
}
|
||||
}
|
||||
return tok
|
||||
}
|
||||
|
||||
// nil last token represented by nil
|
||||
func (l *Lexer) getLastKind() *TokenKind {
|
||||
var lastKind *TokenKind
|
||||
if l.last != nil {
|
||||
lastKind = &l.last.Kind
|
||||
}
|
||||
return lastKind
|
||||
}
|
||||
|
||||
// checkLastToken verifies that a token of the given kind (and raw text,
// for operators) may legally follow the previously emitted token.
// A nil last kind means the expression is just starting.
func (l *Lexer) checkLastToken(kind TokenKind, raw string) bool {
	lastKind := l.getLastKind()

	// Helper to check if lastKind is in allowed list
	allowed := func(allowedKinds ...TokenKind) bool {
		return lastKind != nil && slices.Contains(allowedKinds, *lastKind)
	}
	// For nil last, we treat as no previous token.
	// Each case lists the previous kinds this token kind may follow.
	switch kind {
	case TokenKindStartGroup:
		// '(' may open the expression or follow another opener/operator.
		return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartGroup, TokenKindStartParameters, TokenKindStartIndex, TokenKindLogicalOperator)
	case TokenKindStartIndex:
		// '[' only after something indexable (a value or closed group).
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindStartParameters:
		// '(' opening an argument list only directly after a function name.
		return allowed(TokenKindFunction)
	case TokenKindEndGroup:
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindEndIndex:
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindEndParameters:
		// ')' may also close an empty argument list (after '(').
		return allowed(TokenKindStartParameters, TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindSeparator:
		// ',' only after a complete argument.
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindWildcard:
		// '*' only as an index ("a[*]") or a dereferenced segment ("a.*").
		return allowed(TokenKindStartIndex, TokenKindDereference)
	case TokenKindDereference:
		// '.' only after something dereferenceable.
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindLogicalOperator:
		if raw == "!" { // unary negation behaves like an opener
			return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartGroup, TokenKindStartParameters, TokenKindStartIndex, TokenKindLogicalOperator)
		}
		// Binary operators need a complete left-hand side.
		return allowed(TokenKindEndGroup, TokenKindEndParameters, TokenKindEndIndex, TokenKindWildcard, TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString, TokenKindPropertyName, TokenKindNamedValue)
	case TokenKindNull, TokenKindBoolean, TokenKindNumber, TokenKindString:
		return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartIndex, TokenKindStartGroup, TokenKindStartParameters, TokenKindLogicalOperator)
	case TokenKindPropertyName:
		// Property names only ever follow '.'.
		return allowed(TokenKindDereference)
	case TokenKindFunction, TokenKindNamedValue:
		return lastKind == nil || allowed(TokenKindSeparator, TokenKindStartIndex, TokenKindStartGroup, TokenKindStartParameters, TokenKindLogicalOperator)
	default:
		return true
	}
}
|
||||
|
||||
// readNumber parses a numeric literal.
|
||||
func (l *Lexer) readNumber() *Token {
|
||||
start := l.index
|
||||
periods := 0
|
||||
for l.index < len(l.expr) {
|
||||
ch := l.expr[l.index]
|
||||
if ch == '.' {
|
||||
periods++
|
||||
}
|
||||
if testTokenBoundary(rune(ch)) && ch != '.' {
|
||||
break
|
||||
}
|
||||
l.index++
|
||||
}
|
||||
raw := l.expr[start:l.index]
|
||||
if len(raw) > 2 {
|
||||
switch raw[:2] {
|
||||
case "0x", "0o":
|
||||
tok := l.createToken(TokenKindNumber, raw)
|
||||
if i, err := strconv.ParseInt(raw, 0, 32); err == nil {
|
||||
tok.Value = float64(i)
|
||||
return tok
|
||||
}
|
||||
}
|
||||
}
|
||||
// Try to parse as float64
|
||||
var val interface{} = raw
|
||||
if f, err := strconv.ParseFloat(raw, 64); err == nil {
|
||||
val = f
|
||||
}
|
||||
tok := l.createToken(TokenKindNumber, raw)
|
||||
tok.Value = val
|
||||
return tok
|
||||
}
|
||||
|
||||
// readString parses a single‑quoted string literal. Two consecutive
// quotes ('') inside the literal escape a single quote. Raw keeps the
// surrounding quotes; Value holds the unescaped content. An unterminated
// literal is returned as TokenKindUnexpected.
func (l *Lexer) readString() *Token {
	start := l.index
	l.index++ // skip opening quote
	var sb strings.Builder
	closed := false
	for l.index < len(l.expr) {
		ch := l.expr[l.index]
		l.index++
		if ch == '\'' {
			if l.index < len(l.expr) && l.expr[l.index] == '\'' {
				// escaped quote
				sb.WriteByte('\'')
				l.index++
				continue
			}
			// Lone quote terminates the literal.
			closed = true
			break
		}
		sb.WriteByte(ch)
	}
	raw := l.expr[start:l.index]
	tok := l.createToken(TokenKindString, raw)
	if closed {
		tok.Value = sb.String()
	} else {
		// Ran off the end of the input without a closing quote.
		tok.Kind = TokenKindUnexpected
	}
	return tok
}
|
||||
|
||||
// readOperator parses logical operators (==, !=, >, >=, etc.).
|
||||
func (l *Lexer) readOperator() *Token {
|
||||
start := l.index
|
||||
l.index++
|
||||
if l.index < len(l.expr) {
|
||||
two := l.expr[start : l.index+1]
|
||||
switch two {
|
||||
case "!=", ">=", "<=", "==", "&&", "||":
|
||||
l.index++
|
||||
return l.createToken(TokenKindLogicalOperator, two)
|
||||
}
|
||||
}
|
||||
ch := l.expr[start]
|
||||
switch ch {
|
||||
case '!', '>', '<':
|
||||
return l.createToken(TokenKindLogicalOperator, string(ch))
|
||||
}
|
||||
return l.createToken(TokenKindUnexpected, string(ch))
|
||||
}
|
||||
|
||||
// readKeyword parses identifiers and word-like literals. Depending on
// context the result is a property name (after '.'), a boolean / null /
// NaN / Infinity literal, a function name (when directly followed by
// '('), or a named value.
func (l *Lexer) readKeyword() *Token {
	start := l.index
	// Consume until whitespace or any structural/operator character.
	for l.index < len(l.expr) && !unicode.IsSpace(rune(l.expr[l.index])) && !strings.ContainsRune("()[],.!<>==&|*", rune(l.expr[l.index])) {
		l.index++
	}
	raw := l.expr[start:l.index]
	if l.last != nil && l.last.Kind == TokenKindDereference {
		// After '.' every word is a property name, even "true" or "null".
		return l.createToken(TokenKindPropertyName, raw)
	}
	switch raw {
	case "true":
		tok := l.createToken(TokenKindBoolean, raw)
		tok.Value = true
		return tok
	case "false":
		tok := l.createToken(TokenKindBoolean, raw)
		tok.Value = false
		return tok
	case "null":
		return l.createToken(TokenKindNull, raw)
	case "NaN":
		tok := l.createToken(TokenKindNumber, raw)
		tok.Value = math.NaN()
		return tok
	case "Infinity":
		tok := l.createToken(TokenKindNumber, raw)
		tok.Value = math.Inf(1)
		return tok
	}
	// A word immediately followed by '(' is a function name.
	if l.index < len(l.expr) && l.expr[l.index] == '(' {
		return l.createToken(TokenKindFunction, raw)
	}
	return l.createToken(TokenKindNamedValue, raw)
}
|
||||
|
||||
// FlagV1 mirrors ExpressionFlags.DTExpressionsV1: when set, the lexer
// rejects logical operators and grouping parentheses. It is the only
// flag supported for now.
const FlagV1 = 1
|
||||
|
||||
// UnclosedTokens returns the stack of unclosed start tokens.
|
||||
func (l *Lexer) UnclosedTokens() []TokenKind {
|
||||
return l.stack
|
||||
}
|
||||
112
internal/expr/lexer_additional_test.go
Normal file
112
internal/expr/lexer_additional_test.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package workflow
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestLexerMultiple runs a set of expressions through the lexer and
|
||||
// verifies that the produced token kinds and values match expectations.
|
||||
func TestLexerMultiple(t *testing.T) {
|
||||
cases := []struct {
|
||||
expr string
|
||||
expected []TokenKind
|
||||
values []interface{} // optional, nil if not checking values
|
||||
}{
|
||||
{
|
||||
expr: "github.event_name == 'push'",
|
||||
expected: []TokenKind{
|
||||
TokenKindNamedValue, // github
|
||||
TokenKindDereference,
|
||||
TokenKindPropertyName, // event_name
|
||||
TokenKindLogicalOperator, // ==
|
||||
TokenKindString, // 'push'
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: "github.event_name == 'push' && github.ref == 'refs/heads/main'",
|
||||
expected: []TokenKind{
|
||||
TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, TokenKindLogicalOperator, TokenKindString,
|
||||
TokenKindLogicalOperator, // &&
|
||||
TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, TokenKindLogicalOperator, TokenKindString,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: "contains(github.ref, 'refs/heads/')",
|
||||
expected: []TokenKind{
|
||||
TokenKindFunction, // contains
|
||||
TokenKindStartParameters,
|
||||
TokenKindNamedValue, TokenKindDereference, TokenKindPropertyName, // github.ref
|
||||
TokenKindSeparator,
|
||||
TokenKindString,
|
||||
TokenKindEndParameters,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: "matrix[0].name",
|
||||
expected: []TokenKind{
|
||||
TokenKindNamedValue, // matrix
|
||||
TokenKindStartIndex,
|
||||
TokenKindNumber,
|
||||
TokenKindEndIndex,
|
||||
TokenKindDereference,
|
||||
TokenKindPropertyName, // name
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: "github.*",
|
||||
expected: []TokenKind{
|
||||
TokenKindNamedValue, TokenKindDereference, TokenKindWildcard,
|
||||
},
|
||||
},
|
||||
{
|
||||
expr: "null",
|
||||
expected: []TokenKind{TokenKindNull},
|
||||
},
|
||||
{
|
||||
expr: "true",
|
||||
expected: []TokenKind{TokenKindBoolean},
|
||||
values: []interface{}{true},
|
||||
},
|
||||
{
|
||||
expr: "123",
|
||||
expected: []TokenKind{TokenKindNumber},
|
||||
values: []interface{}{123.0},
|
||||
},
|
||||
{
|
||||
expr: "(a && b)",
|
||||
expected: []TokenKind{TokenKindStartGroup, TokenKindNamedValue, TokenKindLogicalOperator, TokenKindNamedValue, TokenKindEndGroup},
|
||||
},
|
||||
{
|
||||
expr: "[1,2]", // Syntax Error
|
||||
expected: []TokenKind{TokenKindUnexpected, TokenKindNumber, TokenKindSeparator, TokenKindNumber, TokenKindEndIndex},
|
||||
},
|
||||
{
|
||||
expr: "'Hello i''s escaped'",
|
||||
expected: []TokenKind{TokenKindString},
|
||||
values: []interface{}{"Hello i's escaped"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
lexer := NewLexer(tc.expr, 0)
|
||||
var tokens []*Token
|
||||
for {
|
||||
tok := lexer.Next()
|
||||
if tok == nil {
|
||||
break
|
||||
}
|
||||
tokens = append(tokens, tok)
|
||||
}
|
||||
assert.Equal(t, len(tc.expected), len(tokens), "expression: %s", tc.expr)
|
||||
for i, kind := range tc.expected {
|
||||
assert.Equal(t, kind, tokens[i].Kind, "expr %s token %d", tc.expr, i)
|
||||
}
|
||||
if tc.values != nil {
|
||||
for i, val := range tc.values {
|
||||
assert.Equal(t, val, tokens[i].Value, "expr %s token %d value", tc.expr, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
56
internal/expr/lexer_test.go
Normal file
56
internal/expr/lexer_test.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package workflow
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestLexer(t *testing.T) {
|
||||
input := "github.event_name == 'push' && github.ref == 'refs/heads/main'"
|
||||
lexer := NewLexer(input, 0)
|
||||
var tokens []*Token
|
||||
for {
|
||||
tok := lexer.Next()
|
||||
if tok == nil || tok.Kind == TokenKindUnexpected {
|
||||
break
|
||||
}
|
||||
tokens = append(tokens, tok)
|
||||
}
|
||||
for i, tok := range tokens {
|
||||
t.Logf("Token %d: Kind=%v, Value=%v", i, tok.Kind, tok.Value)
|
||||
}
|
||||
assert.Equal(t, tokens[1].Kind, TokenKindDereference)
|
||||
}
|
||||
|
||||
func TestLexerNumbers(t *testing.T) {
|
||||
table := []struct {
|
||||
in string
|
||||
out interface{}
|
||||
}{
|
||||
{"-Infinity", math.Inf(-1)},
|
||||
{"Infinity", math.Inf(1)},
|
||||
{"2.5", float64(2.5)},
|
||||
{"3.3", float64(3.3)},
|
||||
{"1", float64(1)},
|
||||
{"-1", float64(-1)},
|
||||
{"0x34", float64(0x34)},
|
||||
{"0o34", float64(0o34)},
|
||||
}
|
||||
for _, cs := range table {
|
||||
lexer := NewLexer(cs.in, 0)
|
||||
var tokens []*Token
|
||||
for {
|
||||
tok := lexer.Next()
|
||||
if tok == nil || tok.Kind == TokenKindUnexpected {
|
||||
break
|
||||
}
|
||||
tokens = append(tokens, tok)
|
||||
}
|
||||
require.Len(t, tokens, 1)
|
||||
assert.Equal(t, cs.out, tokens[0].Value)
|
||||
assert.Equal(t, cs.in, tokens[0].Raw)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user