This commit is contained in:
2026-04-05 18:20:42 +03:00
commit 32737ee6d6
18 changed files with 2719 additions and 0 deletions

104
dsl/ast.go Normal file
View File

@@ -0,0 +1,104 @@
package dsl
import "time"
// File is the root of a parsed test specification: build commands,
// file-wide defaults, and the list of weighted test groups.
type File struct {
	Build        string // default build command
	BuildLinux   string // OS-specific build override (selection logic lives outside this package)
	BuildWindows string // OS-specific build override
	BuildDarwin  string // OS-specific build override
	Timeout      time.Duration // file-level default timeout, inherited by groups declared after it
	Binary       string // executable name produced by build (default: solution)
	NormalizeCRLF bool  // strip \r before matching stdout/stderr/outFiles
	TrimTrailingWS bool // trim trailing whitespace on each line before matching
	Groups       []*Group
}
// Group is a weighted collection of tests that share defaults
// (timeout, env, wrapper) and a scoring mode.
type Group struct {
	Name    string
	Weight  float64 // contribution to the total score; weights are expected to sum to 1.0
	Timeout time.Duration // default timeout inherited by this group's tests
	Env     map[string]string // environment entries shared by the group's tests
	Scoring ScoringMode
	Wrapper string // exec wrapper command (e.g., "valgrind --error-exitcode=1")
	Tests   []*Test
	Pattern *Pattern // optional file-driven test discovery (see Pattern)
}
// ScoringMode controls how a group's weight is converted into a score.
type ScoringMode int

const (
	ScoringPartial   ScoringMode = iota // weight * passed/total (default)
	ScoringAllOrNone                    // weight or 0
)
// Pattern describes filesystem-driven test discovery in one of two
// modes: glob mode pairs input/output files directly, while dir mode
// (DirsGlob set) enumerates case directories containing fixed filenames.
type Pattern struct {
	InputGlob  string // glob for input files (glob mode)
	OutputGlob string // glob for expected-output files (glob mode)
	DirsGlob   string // glob for per-case directories (dir mode)
	InputFile  string // input filename inside each matched dir (dir mode)
	OutputFile string // expected-output filename inside each matched dir (dir mode)
}
// IsDirMode reports whether the pattern enumerates per-case directories
// (DirsGlob set) rather than matching input/output globs directly.
func (p *Pattern) IsDirMode() bool {
	return len(p.DirsGlob) > 0
}
// Test is a single test case: the inputs handed to the program under
// test plus the expectations checked after the run.
type Test struct {
	Name     string
	Timeout  time.Duration // per-test limit; defaults to the enclosing group's timeout
	Env      map[string]string // extra environment entries, from env("K") = "V"
	Wrapper  string // per-test exec wrapper override
	Stdin    *string // nil means no stdin block was given
	Args     []string // command-line arguments, from args = "a" "b" ...
	InFiles  map[string]string // filename -> contents, from file("name") = "..."
	ExitCode *int // expected exit code; the parser defaults it to 0
	Stdout   Matcher // expectation on stdout (NoMatcher = unchecked)
	Stderr   Matcher // expectation on stderr (NoMatcher = unchecked)
	OutFiles map[string]string // filename -> expected contents, from outFile("name") = "..."
}
// Matcher is one way of checking a produced output stream against an
// expectation. The unexported marker method keeps the set of
// implementations closed to this package.
type Matcher interface {
	matcherNode()
}

// ExactMatcher corresponds to the `stdout = "..."` form: the full
// expected value.
type ExactMatcher struct {
	Value string
}

func (ExactMatcher) matcherNode() {}

// ContainsMatcher corresponds to the `stdout contains "..."` form.
type ContainsMatcher struct {
	Substr string
}

func (ContainsMatcher) matcherNode() {}

// RegexMatcher corresponds to the `stdout matches "..."` form; Pattern
// holds the regular expression source text.
type RegexMatcher struct {
	Pattern string
}

func (RegexMatcher) matcherNode() {}

// NumericEpsMatcher corresponds to the `stdout ~ eps of "value"` form:
// a numeric comparison with tolerance Epsilon.
type NumericEpsMatcher struct {
	Epsilon float64
	Value   string
}

func (NumericEpsMatcher) matcherNode() {}

// AnyOrderMatcher corresponds to the `anyOrder { "a" "b" ... }` form:
// the expected lines without a fixed order.
type AnyOrderMatcher struct {
	Lines []string
}

func (AnyOrderMatcher) matcherNode() {}

// NoMatcher means "don't check this stream"; it is the parser's default
// for both stdout and stderr.
type NoMatcher struct{}

func (NoMatcher) matcherNode() {}

387
dsl/lexer.go Normal file
View File

@@ -0,0 +1,387 @@
package dsl
import (
"fmt"
"strings"
"unicode"
)
// TokenType enumerates the lexical token kinds of the DSL.
type TokenType int

const (
	TOKEN_STRING   TokenType = iota // quoted or heredoc string literal (value already unescaped/dedented)
	TOKEN_IDENT                     // bare identifier / keyword
	TOKEN_FLOAT                     // decimal number containing '.'
	TOKEN_INT                       // decimal integer
	TOKEN_DURATION                  // number plus ms/m/s suffix, e.g. "10s"
	TOKEN_LBRACE                    // {
	TOKEN_RBRACE                    // }
	TOKEN_LPAREN                    // (
	TOKEN_RPAREN                    // )
	TOKEN_ASSIGN                    // =
	TOKEN_TILDE                     // ~
	TOKEN_EOF                       // end-of-input sentinel
)
// String returns a human-readable name for the token type; it feeds the
// %s verbs in lexer and parser error messages.
func (t TokenType) String() string {
	names := map[TokenType]string{
		TOKEN_STRING:   "STRING",
		TOKEN_IDENT:    "IDENT",
		TOKEN_FLOAT:    "FLOAT",
		TOKEN_INT:      "INT",
		TOKEN_DURATION: "DURATION",
		TOKEN_LBRACE:   "{",
		TOKEN_RBRACE:   "}",
		TOKEN_LPAREN:   "(",
		TOKEN_RPAREN:   ")",
		TOKEN_ASSIGN:   "=",
		TOKEN_TILDE:    "~",
		TOKEN_EOF:      "EOF",
	}
	if name, ok := names[t]; ok {
		return name
	}
	return "UNKNOWN"
}
// Token is a single lexeme with its 1-based source position.
type Token struct {
	Type  TokenType
	Value string // literal text, or the decoded value for strings
	Line  int    // 1-based line of the token's first rune
	Col   int    // 1-based column (in runes) of the token's first rune
}
// String renders the token for debugging, e.g. Token(IDENT, "build", 2:1).
func (t Token) String() string {
	return fmt.Sprintf("Token(%s, %q, %d:%d)", t.Type, t.Value, t.Line, t.Col)
}
// Lexer scans DSL source text into tokens. The source is kept as runes
// so multi-byte characters advance the column counter by one.
type Lexer struct {
	src  []rune
	pos  int // index of the next unconsumed rune
	line int // 1-based current line
	col  int // 1-based current column
}
// NewLexer returns a lexer positioned at the start of src (line 1, col 1).
func NewLexer(src string) *Lexer {
	l := &Lexer{line: 1, col: 1}
	l.src = []rune(src)
	return l
}
// peek returns the current rune without consuming it; ok is false at EOF.
func (l *Lexer) peek() (rune, bool) {
	if l.pos < len(l.src) {
		return l.src[l.pos], true
	}
	return 0, false
}
// peekAt returns the rune `offset` positions ahead of the cursor without
// consuming anything; ok is false when that position is past EOF.
func (l *Lexer) peekAt(offset int) (rune, bool) {
	if idx := l.pos + offset; idx < len(l.src) {
		return l.src[idx], true
	}
	return 0, false
}
// advance consumes and returns the current rune, keeping the 1-based
// line/col counters in sync. Callers must first confirm a rune exists
// via peek; advancing at EOF would panic on the slice index.
func (l *Lexer) advance() rune {
	ch := l.src[l.pos]
	l.pos++
	if ch == '\n' {
		l.line++
		l.col = 1
	} else {
		l.col++
	}
	return ch
}
// skipWhitespaceAndComments advances past Unicode whitespace and `//`
// line comments, stopping at the first significant rune (or EOF).
func (l *Lexer) skipWhitespaceAndComments() {
	for {
		ch, ok := l.peek()
		if !ok {
			return
		}
		if ch == '/' {
			next, ok2 := l.peekAt(1)
			if ok2 && next == '/' {
				// Line comment: consume up to (not including) the
				// newline; the whitespace branch below eats the '\n'.
				for {
					c, ok := l.peek()
					if !ok || c == '\n' {
						break
					}
					l.advance()
				}
				continue
			}
			// A lone '/' is not whitespace; fall through so Tokenize
			// reports it as an unexpected character.
		}
		if unicode.IsSpace(ch) {
			l.advance()
			continue
		}
		break
	}
}
// Tokenize scans the entire input and returns the token stream, always
// terminated by a TOKEN_EOF token carrying the final position.
func (l *Lexer) Tokenize() ([]Token, error) {
	var tokens []Token
	for {
		l.skipWhitespaceAndComments()
		ch, ok := l.peek()
		if !ok {
			tokens = append(tokens, Token{Type: TOKEN_EOF, Line: l.line, Col: l.col})
			break
		}
		// Capture the position before consuming so multi-rune tokens
		// point at their first character.
		line, col := l.line, l.col
		switch {
		case ch == '{':
			l.advance()
			tokens = append(tokens, Token{TOKEN_LBRACE, "{", line, col})
		case ch == '}':
			l.advance()
			tokens = append(tokens, Token{TOKEN_RBRACE, "}", line, col})
		case ch == '(':
			l.advance()
			tokens = append(tokens, Token{TOKEN_LPAREN, "(", line, col})
		case ch == ')':
			l.advance()
			tokens = append(tokens, Token{TOKEN_RPAREN, ")", line, col})
		case ch == '=':
			l.advance()
			tokens = append(tokens, Token{TOKEN_ASSIGN, "=", line, col})
		case ch == '~':
			l.advance()
			tokens = append(tokens, Token{TOKEN_TILDE, "~", line, col})
		case ch == '"':
			// A triple quote opens a heredoc; a single quote a plain string.
			if l.isHeredocStart() {
				s, err := l.readHeredoc()
				if err != nil {
					return nil, err
				}
				tokens = append(tokens, Token{TOKEN_STRING, s, line, col})
			} else {
				s, err := l.readString()
				if err != nil {
					return nil, err
				}
				tokens = append(tokens, Token{TOKEN_STRING, s, line, col})
			}
		case unicode.IsDigit(ch) || (ch == '-' && l.isNumberNext()):
			tok, err := l.readNumberOrDuration(line, col)
			if err != nil {
				return nil, err
			}
			tokens = append(tokens, tok)
		case unicode.IsLetter(ch) || ch == '_':
			ident := l.readIdent()
			tokens = append(tokens, Token{TOKEN_IDENT, ident, line, col})
		default:
			return nil, fmt.Errorf("%d:%d: unexpected character %q", line, col, ch)
		}
	}
	return tokens, nil
}
// isHeredocStart reports whether the cursor sits on an opening `"""`.
func (l *Lexer) isHeredocStart() bool {
	for off := 0; off < 3; off++ {
		ch, ok := l.peekAt(off)
		if !ok || ch != '"' {
			return false
		}
	}
	return true
}
// isNumberNext reports whether the rune after the cursor is a digit; it
// distinguishes a negative-number '-' from a stray minus sign.
func (l *Lexer) isNumberNext() bool {
	if next, ok := l.peekAt(1); ok {
		return unicode.IsDigit(next)
	}
	return false
}
// readHeredoc reads a `"""` heredoc; the cursor must be on the opening
// triple quote. The raw body is dedented via dedentHeredoc before being
// returned. Hitting EOF before a closing `"""` is an error.
func (l *Lexer) readHeredoc() (string, error) {
	// Consume the opening """.
	l.advance()
	l.advance()
	l.advance()
	var body strings.Builder
	for {
		if l.pos+2 < len(l.src) &&
			l.src[l.pos] == '"' &&
			l.src[l.pos+1] == '"' &&
			l.src[l.pos+2] == '"' {
			// Closing """ found: consume it and dedent the payload.
			l.advance()
			l.advance()
			l.advance()
			return dedentHeredoc(body.String()), nil
		}
		if _, ok := l.peek(); !ok {
			return "", fmt.Errorf("unterminated heredoc")
		}
		body.WriteRune(l.advance())
	}
}
// dedentHeredoc normalizes a heredoc body: it drops a leading and a
// trailing blank line (the ones adjacent to the triple quotes), then
// strips the smallest indentation shared by all non-blank lines.
func dedentHeredoc(s string) string {
	rows := strings.Split(s, "\n")
	if n := len(rows); n > 0 && strings.TrimSpace(rows[0]) == "" {
		rows = rows[1:]
	}
	if n := len(rows); n > 0 && strings.TrimSpace(rows[n-1]) == "" {
		rows = rows[:n-1]
	}
	// Smallest leading run of spaces/tabs among non-blank rows; -1
	// until the first non-blank row is seen.
	indent := -1
	for _, row := range rows {
		if strings.TrimSpace(row) == "" {
			continue
		}
		lead := len(row) - len(strings.TrimLeft(row, " \t"))
		if indent < 0 || lead < indent {
			indent = lead
		}
	}
	if indent < 0 {
		indent = 0
	}
	out := make([]string, len(rows))
	for i, row := range rows {
		if len(row) >= indent {
			out[i] = row[indent:]
		} else {
			out[i] = row
		}
	}
	return strings.Join(out, "\n")
}
// readString reads a regular double-quoted string; the cursor must be
// on the opening quote. Escapes \n, \t, \\ and \" are decoded; any
// other escape, or EOF before the closing quote, is an error.
func (l *Lexer) readString() (string, error) {
	l.advance() // consume the opening quote
	var buf strings.Builder
	for {
		ch, ok := l.peek()
		if !ok {
			return "", fmt.Errorf("unterminated string at line %d", l.line)
		}
		if ch == '"' {
			l.advance()
			break
		}
		if ch == '\\' {
			l.advance()
			esc, ok := l.peek()
			if !ok {
				return "", fmt.Errorf("unterminated escape")
			}
			l.advance()
			switch esc {
			case 'n':
				buf.WriteByte('\n')
			case 't':
				buf.WriteByte('\t')
			case '\\':
				buf.WriteByte('\\')
			case '"':
				buf.WriteByte('"')
			default:
				return "", fmt.Errorf("unknown escape \\%c", esc)
			}
			continue
		}
		buf.WriteRune(l.advance())
	}
	return buf.String(), nil
}
// readIdent consumes a run of letters, digits and underscores starting
// at the cursor and returns it as the identifier text.
func (l *Lexer) readIdent() string {
	var sb strings.Builder
	for {
		ch, ok := l.peek()
		if !ok || !(unicode.IsLetter(ch) || unicode.IsDigit(ch) || ch == '_') {
			return sb.String()
		}
		sb.WriteRune(l.advance())
	}
}
// readNumberOrDuration lexes an optionally negative decimal literal and
// classifies it: DURATION when a ms/m/s suffix follows (the suffix is
// kept in the token value so time.ParseDuration can consume it later),
// FLOAT when it contains '.', INT otherwise. line/col are the token's
// start position captured by the caller before consuming.
func (l *Lexer) readNumberOrDuration(line, col int) (Token, error) {
	var buf strings.Builder
	isFloat := false
	if ch, _ := l.peek(); ch == '-' {
		buf.WriteRune(l.advance())
	}
	for {
		ch, ok := l.peek()
		if !ok {
			break
		}
		if unicode.IsDigit(ch) {
			buf.WriteRune(l.advance())
		} else if ch == '.' && !isFloat {
			// Only the first '.' belongs to the number.
			isFloat = true
			buf.WriteRune(l.advance())
		} else {
			break
		}
	}
	suffix := l.tryReadDurationSuffix()
	if suffix != "" {
		return Token{TOKEN_DURATION, buf.String() + suffix, line, col}, nil
	}
	if isFloat {
		return Token{TOKEN_FLOAT, buf.String(), line, col}, nil
	}
	return Token{TOKEN_INT, buf.String(), line, col}, nil
}
// tryReadDurationSuffix consumes a duration unit ("ms", "m" or "s")
// immediately after a number and returns it, or returns "" without
// consuming anything when no unit follows.
//
// Fix: the previous version consumed the suffix greedily, so input like
// `10sec` lexed as DURATION "10s" followed by IDENT "ec", producing a
// confusing parse error far from the real mistake. A unit is now only
// taken when it ends at a word boundary (not followed by a letter,
// digit or underscore), so `10sec` stays INT 10 + IDENT "sec" and the
// parser reports "expected duration" at the right spot.
func (l *Lexer) tryReadDurationSuffix() string {
	ch, ok := l.peek()
	if !ok {
		return ""
	}
	// isWordRune reports whether r could continue an identifier.
	isWordRune := func(r rune) bool {
		return unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_'
	}
	// boundaryAt reports whether the rune at the given lookahead offset
	// ends the token (EOF or a non-word rune).
	boundaryAt := func(offset int) bool {
		r, ok := l.peekAt(offset)
		return !ok || !isWordRune(r)
	}
	if ch == 'm' {
		if next, ok2 := l.peekAt(1); ok2 && next == 's' && boundaryAt(2) {
			l.advance()
			l.advance()
			return "ms"
		}
		if boundaryAt(1) {
			l.advance()
			return "m"
		}
		return ""
	}
	if ch == 's' && boundaryAt(1) {
		l.advance()
		return "s"
	}
	return ""
}

687
dsl/parser.go Normal file
View File

@@ -0,0 +1,687 @@
package dsl
import (
"fmt"
"math"
"strconv"
"time"
)
// Parser turns the lexer's token stream into a *File AST. It holds a
// cursor into tokens plus non-fatal warnings gathered along the way.
type Parser struct {
	tokens []Token
	pos    int      // index of the next unconsumed token
	warns  []string // non-fatal diagnostics (see Warnings)
}
// NewParser wraps a token stream produced by Lexer.Tokenize.
func NewParser(tokens []Token) *Parser {
	return &Parser{tokens: tokens}
}

// Warnings returns the non-fatal diagnostics collected during parsing
// (e.g. group weights not summing to 1.0).
func (p *Parser) Warnings() []string {
	return p.warns
}

// warn records a non-fatal diagnostic without aborting the parse.
func (p *Parser) warn(msg string) {
	p.warns = append(p.warns, msg)
}
// peek returns the current token without consuming it; past the end of
// the stream it synthesizes a TOKEN_EOF.
func (p *Parser) peek() Token {
	if p.pos < len(p.tokens) {
		return p.tokens[p.pos]
	}
	return Token{Type: TOKEN_EOF}
}
// advance consumes and returns the current token; EOF is sticky and is
// returned repeatedly without moving the cursor.
func (p *Parser) advance() Token {
	tok := p.peek()
	if tok.Type == TOKEN_EOF {
		return tok
	}
	p.pos++
	return tok
}
// expect consumes and returns the next token when it has type tt;
// otherwise the token is left unconsumed and a positioned error returned.
func (p *Parser) expect(tt TokenType) (Token, error) {
	t := p.peek()
	if t.Type == tt {
		return p.advance(), nil
	}
	return t, fmt.Errorf("%d:%d: expected %s, got %s (%q)", t.Line, t.Col, tt, t.Type, t.Value)
}
// expectIdent consumes the next token when it is the identifier `val`;
// otherwise it leaves the cursor in place and returns a positioned error.
func (p *Parser) expectIdent(val string) error {
	t := p.peek()
	if t.Type == TOKEN_IDENT && t.Value == val {
		p.advance()
		return nil
	}
	return fmt.Errorf("%d:%d: expected %q, got %q", t.Line, t.Col, val, t.Value)
}
// isIdent reports whether the current token is the identifier `val`,
// without consuming it.
func (p *Parser) isIdent(val string) bool {
	t := p.peek()
	if t.Type != TOKEN_IDENT {
		return false
	}
	return t.Value == val
}
// Parse lexes and parses DSL source into a *File. It returns the AST,
// any non-fatal warnings, and an error. Lexing failures carry no
// warnings; parse failures return whatever warnings accumulated first.
func Parse(src string) (*File, []string, error) {
	tokens, err := NewLexer(src).Tokenize()
	if err != nil {
		return nil, nil, err
	}
	ps := NewParser(tokens)
	ast, err := ps.parseFile()
	if err != nil {
		return nil, ps.Warnings(), err
	}
	return ast, ps.Warnings(), nil
}
// parseFile parses the whole token stream into a File. Top-level grammar
// (note the quirk: build*/timeout take a bare value, while binary,
// normalize_crlf and trim_trailing_ws use `key = value`):
//
//	build / build_linux / build_windows / build_darwin "cmd"
//	timeout <duration>
//	binary = "name"
//	normalize_crlf = true|false
//	trim_trailing_ws = true|false
//	group("name") { ... }
//
// After parsing, group weights are validated (warning only).
func (p *Parser) parseFile() (*File, error) {
	f := &File{}
	for p.peek().Type != TOKEN_EOF {
		t := p.peek()
		if t.Type != TOKEN_IDENT {
			return nil, fmt.Errorf("%d:%d: unexpected token %q", t.Line, t.Col, t.Value)
		}
		switch t.Value {
		case "build":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			f.Build = s.Value
		case "build_linux":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			f.BuildLinux = s.Value
		case "build_windows":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			f.BuildWindows = s.Value
		case "build_darwin":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			f.BuildDarwin = s.Value
		case "binary":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			f.Binary = s.Value
		case "normalize_crlf":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			b, err := p.parseBool()
			if err != nil {
				return nil, err
			}
			f.NormalizeCRLF = b
		case "trim_trailing_ws":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			b, err := p.parseBool()
			if err != nil {
				return nil, err
			}
			f.TrimTrailingWS = b
		case "timeout":
			p.advance()
			d, err := p.parseDuration()
			if err != nil {
				return nil, err
			}
			f.Timeout = d
		case "group":
			// NOTE(review): groups inherit the file timeout as known at
			// this point, so a `timeout` line placed after a group does
			// not apply to it retroactively.
			g, err := p.parseGroup(f.Timeout)
			if err != nil {
				return nil, err
			}
			f.Groups = append(f.Groups, g)
		default:
			return nil, fmt.Errorf("%d:%d: unexpected keyword %q", t.Line, t.Col, t.Value)
		}
	}
	if err := p.validateWeights(f); err != nil {
		return nil, err
	}
	return f, nil
}
// validateWeights checks that the group weights sum to 1.0 (within a
// small tolerance). A mismatch is recorded as a warning, never an error;
// the error return exists for interface symmetry and is always nil.
func (p *Parser) validateWeights(f *File) error {
	if len(f.Groups) == 0 {
		return nil
	}
	var total float64
	for _, g := range f.Groups {
		total += g.Weight
	}
	if math.Abs(total-1.0) > 0.001 {
		p.warn(fmt.Sprintf("group weights sum to %.4f, expected 1.0", total))
	}
	return nil
}
// parseGroup parses `group("name") { ... }`. Recognized members are
// weight/timeout/scoring/wrapper assignments, env("K") = "V" entries,
// nested test blocks and an optional pattern block. defaultTimeout is
// the file-level timeout in effect when the group starts.
func (p *Parser) parseGroup(defaultTimeout time.Duration) (*Group, error) {
	if err := p.expectIdent("group"); err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_LPAREN); err != nil {
		return nil, err
	}
	name, err := p.expect(TOKEN_STRING)
	if err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_RPAREN); err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_LBRACE); err != nil {
		return nil, err
	}
	g := &Group{
		Name:    name.Value,
		Timeout: defaultTimeout,
		Env:     map[string]string{},
		Scoring: ScoringPartial, // default scoring mode
	}
	for !p.isRBrace() {
		t := p.peek()
		if t.Type != TOKEN_IDENT {
			return nil, fmt.Errorf("%d:%d: unexpected token %q in group", t.Line, t.Col, t.Value)
		}
		switch t.Value {
		case "weight":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			w, err := p.parseFloat()
			if err != nil {
				return nil, err
			}
			g.Weight = w
		case "timeout":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			d, err := p.parseDuration()
			if err != nil {
				return nil, err
			}
			g.Timeout = d
		case "scoring":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			s, err := p.expect(TOKEN_IDENT)
			if err != nil {
				return nil, err
			}
			switch s.Value {
			case "partial":
				g.Scoring = ScoringPartial
			case "all_or_none":
				g.Scoring = ScoringAllOrNone
			default:
				return nil, fmt.Errorf("%d:%d: unknown scoring mode %q", s.Line, s.Col, s.Value)
			}
		case "env":
			// env("KEY") = "value"
			p.advance()
			if _, err := p.expect(TOKEN_LPAREN); err != nil {
				return nil, err
			}
			key, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_RPAREN); err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			val, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			g.Env[key.Value] = val.Value
		case "wrapper":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			g.Wrapper = s.Value
		case "test":
			// Tests inherit the group timeout as currently set, so a
			// `timeout` line after a test does not apply to it.
			test, err := p.parseTest(g.Timeout)
			if err != nil {
				return nil, err
			}
			g.Tests = append(g.Tests, test)
		case "pattern":
			pat, err := p.parsePattern()
			if err != nil {
				return nil, err
			}
			g.Pattern = pat
		default:
			return nil, fmt.Errorf("%d:%d: unexpected keyword %q in group", t.Line, t.Col, t.Value)
		}
	}
	if _, err := p.expect(TOKEN_RBRACE); err != nil {
		return nil, err
	}
	return g, nil
}
// parseTest parses `test("name") { ... }`. Defaults: exit code 0,
// stdout/stderr unchecked (NoMatcher), timeout inherited from the
// group. Members: stdin/args/exitCode/timeout/wrapper assignments,
// env("K") entries, file("name") inputs, outFile("name") expectations,
// and the matcher forms after stdout/stderr (see parseMatcherOrAssign).
// Note the key style inconsistency with the file level: exitCode and
// outFile are camelCase while file-level keys are snake_case.
func (p *Parser) parseTest(defaultTimeout time.Duration) (*Test, error) {
	if err := p.expectIdent("test"); err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_LPAREN); err != nil {
		return nil, err
	}
	name, err := p.expect(TOKEN_STRING)
	if err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_RPAREN); err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_LBRACE); err != nil {
		return nil, err
	}
	zero := 0
	test := &Test{
		Name:     name.Value,
		Timeout:  defaultTimeout,
		Env:      map[string]string{},
		InFiles:  map[string]string{},
		OutFiles: map[string]string{},
		ExitCode: &zero, // expect success unless exitCode overrides it
		Stdout:   NoMatcher{},
		Stderr:   NoMatcher{},
	}
	for !p.isRBrace() {
		t := p.peek()
		if t.Type != TOKEN_IDENT {
			return nil, fmt.Errorf("%d:%d: unexpected token in test body", t.Line, t.Col)
		}
		switch t.Value {
		case "stdin":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			test.Stdin = &s.Value
		case "stdout":
			p.advance()
			m, err := p.parseMatcherOrAssign()
			if err != nil {
				return nil, err
			}
			test.Stdout = m
		case "stderr":
			p.advance()
			m, err := p.parseMatcherOrAssign()
			if err != nil {
				return nil, err
			}
			test.Stderr = m
		case "args":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			args, err := p.parseStringList()
			if err != nil {
				return nil, err
			}
			test.Args = args
		case "exitCode":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			n, err := p.parseInt()
			if err != nil {
				return nil, err
			}
			test.ExitCode = &n
		case "timeout":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			d, err := p.parseDuration()
			if err != nil {
				return nil, err
			}
			test.Timeout = d
		case "wrapper":
			p.advance()
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			test.Wrapper = s.Value
		case "env":
			// env("KEY") = "value"
			p.advance()
			if _, err := p.expect(TOKEN_LPAREN); err != nil {
				return nil, err
			}
			key, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_RPAREN); err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			val, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			test.Env[key.Value] = val.Value
		case "file":
			// file("name") = "contents" — an input file for the run.
			p.advance()
			if _, err := p.expect(TOKEN_LPAREN); err != nil {
				return nil, err
			}
			fname, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_RPAREN); err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			content, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			test.InFiles[fname.Value] = content.Value
		case "outFile":
			// outFile("name") = "contents" — expected file after the run.
			p.advance()
			if _, err := p.expect(TOKEN_LPAREN); err != nil {
				return nil, err
			}
			fname, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_RPAREN); err != nil {
				return nil, err
			}
			if _, err := p.expect(TOKEN_ASSIGN); err != nil {
				return nil, err
			}
			content, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			test.OutFiles[fname.Value] = content.Value
		default:
			return nil, fmt.Errorf("%d:%d: unexpected keyword %q in test", t.Line, t.Col, t.Value)
		}
	}
	if _, err := p.expect(TOKEN_RBRACE); err != nil {
		return nil, err
	}
	return test, nil
}
// parseMatcherOrAssign parses the expectation following a stdout or
// stderr keyword. Accepted forms:
//
//	= "s"               exact value          → ExactMatcher
//	~ eps of "s"        numeric tolerance    → NumericEpsMatcher
//	contains "s"        substring            → ContainsMatcher
//	matches "re"        regular expression   → RegexMatcher
//	anyOrder { "a" … }  lines, any order     → AnyOrderMatcher
func (p *Parser) parseMatcherOrAssign() (Matcher, error) {
	t := p.peek()
	if t.Type == TOKEN_ASSIGN {
		p.advance()
		s, err := p.expect(TOKEN_STRING)
		if err != nil {
			return nil, err
		}
		return ExactMatcher{Value: s.Value}, nil
	}
	if t.Type == TOKEN_TILDE {
		p.advance()
		eps, err := p.parseFloat()
		if err != nil {
			return nil, err
		}
		if err := p.expectIdent("of"); err != nil {
			return nil, err
		}
		s, err := p.expect(TOKEN_STRING)
		if err != nil {
			return nil, err
		}
		return NumericEpsMatcher{Epsilon: eps, Value: s.Value}, nil
	}
	if t.Type == TOKEN_IDENT {
		switch t.Value {
		case "contains":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			return ContainsMatcher{Substr: s.Value}, nil
		case "matches":
			p.advance()
			s, err := p.expect(TOKEN_STRING)
			if err != nil {
				return nil, err
			}
			return RegexMatcher{Pattern: s.Value}, nil
		case "anyOrder":
			p.advance()
			if _, err := p.expect(TOKEN_LBRACE); err != nil {
				return nil, err
			}
			var lines []string
			for !p.isRBrace() {
				s, err := p.expect(TOKEN_STRING)
				if err != nil {
					return nil, err
				}
				lines = append(lines, s.Value)
			}
			if _, err := p.expect(TOKEN_RBRACE); err != nil {
				return nil, err
			}
			return AnyOrderMatcher{Lines: lines}, nil
		}
	}
	return nil, fmt.Errorf("%d:%d: expected matcher (=, ~, contains, matches, anyOrder), got %q", t.Line, t.Col, t.Value)
}
// parsePattern parses `pattern { input = ... output = ... dirs = ... }`.
// When `dirs` is present the pattern is in dir mode and input/output
// name files inside each matched directory; otherwise they are globs.
//
// Fix: field classification used to happen while scanning, so it
// depended on `dirs` appearing BEFORE input/output — writing
// `input` first silently stored it in InputGlob even in dir mode.
// Values are now collected first and assigned once the block closes,
// making the result independent of field order.
func (p *Parser) parsePattern() (*Pattern, error) {
	if err := p.expectIdent("pattern"); err != nil {
		return nil, err
	}
	if _, err := p.expect(TOKEN_LBRACE); err != nil {
		return nil, err
	}
	var input, output, dirs string
	for !p.isRBrace() {
		t := p.peek()
		if t.Type != TOKEN_IDENT {
			return nil, fmt.Errorf("%d:%d: unexpected token in pattern", t.Line, t.Col)
		}
		p.advance()
		if _, err := p.expect(TOKEN_ASSIGN); err != nil {
			return nil, err
		}
		val, err := p.expect(TOKEN_STRING)
		if err != nil {
			return nil, err
		}
		switch t.Value {
		case "input":
			input = val.Value
		case "output":
			output = val.Value
		case "dirs":
			dirs = val.Value
		default:
			return nil, fmt.Errorf("%d:%d: unknown pattern field %q", t.Line, t.Col, t.Value)
		}
	}
	if _, err := p.expect(TOKEN_RBRACE); err != nil {
		return nil, err
	}
	pat := &Pattern{DirsGlob: dirs}
	if dirs != "" {
		// Dir mode: input/output are filenames inside each case dir.
		pat.InputFile = input
		pat.OutputFile = output
	} else {
		// Glob mode: input/output are globs matched directly.
		pat.InputGlob = input
		pat.OutputGlob = output
	}
	return pat, nil
}
// parseStringList consumes one or more consecutive STRING tokens and
// returns their values; zero strings is an error.
func (p *Parser) parseStringList() ([]string, error) {
	var vals []string
	for {
		t := p.peek()
		if t.Type != TOKEN_STRING {
			break
		}
		p.advance()
		vals = append(vals, t.Value)
	}
	if len(vals) > 0 {
		return vals, nil
	}
	return nil, fmt.Errorf("%d:%d: expected at least one string", p.peek().Line, p.peek().Col)
}
// parseFloat consumes a FLOAT or INT token and returns its value as a
// float64 (integers are accepted where floats are expected, e.g.
// `weight = 1`).
//
// Fix: a strconv.ParseFloat failure used to be returned verbatim,
// leaking an unpositioned "strconv.ParseFloat: ..." error; it is now
// wrapped with line/col, consistent with parseInt and parseDuration.
func (p *Parser) parseFloat() (float64, error) {
	t := p.peek()
	if t.Type != TOKEN_FLOAT && t.Type != TOKEN_INT {
		return 0, fmt.Errorf("%d:%d: expected float, got %s", t.Line, t.Col, t.Type)
	}
	p.advance()
	v, err := strconv.ParseFloat(t.Value, 64)
	if err != nil {
		return 0, fmt.Errorf("%d:%d: invalid float %q", t.Line, t.Col, t.Value)
	}
	return v, nil
}
// parseBool consumes the identifier `true` or `false`; anything else
// leaves the cursor in place and returns a positioned error.
func (p *Parser) parseBool() (bool, error) {
	t := p.peek()
	if t.Type == TOKEN_IDENT {
		if t.Value == "true" {
			p.advance()
			return true, nil
		}
		if t.Value == "false" {
			p.advance()
			return false, nil
		}
		return false, fmt.Errorf("%d:%d: expected true/false, got %q", t.Line, t.Col, t.Value)
	}
	return false, fmt.Errorf("%d:%d: expected true/false, got %s %q", t.Line, t.Col, t.Type, t.Value)
}
// parseInt consumes an INT token and converts it, wrapping conversion
// failures (e.g. overflow) with the token's position.
func (p *Parser) parseInt() (int, error) {
	tok, err := p.expect(TOKEN_INT)
	if err != nil {
		return 0, err
	}
	n, convErr := strconv.Atoi(tok.Value)
	if convErr != nil {
		return 0, fmt.Errorf("%d:%d: invalid int %q", tok.Line, tok.Col, tok.Value)
	}
	return n, nil
}
// parseDuration consumes a DURATION token and converts it via
// time.ParseDuration (the lexer keeps the ms/m/s suffix in the token
// value, so it is already in ParseDuration syntax).
func (p *Parser) parseDuration() (time.Duration, error) {
	t := p.peek()
	if t.Type != TOKEN_DURATION {
		return 0, fmt.Errorf("%d:%d: expected duration (e.g. 10s, 2m, 500ms), got %s %q", t.Line, t.Col, t.Type, t.Value)
	}
	p.advance()
	d, err := time.ParseDuration(t.Value)
	if err != nil {
		return 0, fmt.Errorf("%d:%d: invalid duration %q: %w", t.Line, t.Col, t.Value, err)
	}
	return d, nil
}
// isRBrace reports whether the current token closes a block. EOF also
// counts so unterminated blocks exit their loops and fail on the
// subsequent expect(TOKEN_RBRACE) with a positioned error.
func (p *Parser) isRBrace() bool {
	switch p.peek().Type {
	case TOKEN_RBRACE, TOKEN_EOF:
		return true
	}
	return false
}

217
dsl/parser_test.go Normal file
View File

@@ -0,0 +1,217 @@
package dsl
import (
"testing"
"time"
)
// TestParseBasic covers the happy path: file-level build/timeout, a
// group of inline tests (exact and contains matchers, args list,
// exitCode) and a second group with a glob-mode pattern plus a
// per-group timeout override.
func TestParseBasic(t *testing.T) {
	src := `
build "go build -o solution ."
timeout 10s
group("basic") {
weight = 0.4
test("stdin stdout") {
stdin = "5\n1 3 2 5 4\n"
stdout = "1 2 3 4 5\n"
}
test("args") {
args = "--count" "3"
exitCode = 0
stdout contains "foo"
}
}
group("files") {
weight = 0.6
timeout = 5s
pattern {
input = "testdata/*/input.txt"
output = "testdata/*/output.txt"
}
}
`
	f, warns, err := Parse(src)
	if err != nil {
		t.Fatalf("parse error: %v", err)
	}
	// Weights sum to 1.0, so no warnings are expected; log any anyway.
	if len(warns) > 0 {
		t.Logf("warnings: %v", warns)
	}
	if f.Build != "go build -o solution ." {
		t.Errorf("wrong build: %q", f.Build)
	}
	if f.Timeout != 10*time.Second {
		t.Errorf("wrong timeout: %v", f.Timeout)
	}
	if len(f.Groups) != 2 {
		t.Fatalf("expected 2 groups, got %d", len(f.Groups))
	}
	g0 := f.Groups[0]
	if g0.Name != "basic" {
		t.Errorf("wrong group name: %q", g0.Name)
	}
	if g0.Weight != 0.4 {
		t.Errorf("wrong weight: %v", g0.Weight)
	}
	if len(g0.Tests) != 2 {
		t.Fatalf("expected 2 tests, got %d", len(g0.Tests))
	}
	t0 := g0.Tests[0]
	if t0.Name != "stdin stdout" {
		t.Errorf("wrong test name: %q", t0.Name)
	}
	// The lexer decodes \n escapes, so the AST holds real newlines.
	if t0.Stdin == nil || *t0.Stdin != "5\n1 3 2 5 4\n" {
		t.Errorf("wrong stdin: %v", t0.Stdin)
	}
	exact, ok := t0.Stdout.(ExactMatcher)
	if !ok {
		t.Errorf("expected ExactMatcher, got %T", t0.Stdout)
	} else if exact.Value != "1 2 3 4 5\n" {
		t.Errorf("wrong stdout: %q", exact.Value)
	}
	t1 := g0.Tests[1]
	if len(t1.Args) != 2 || t1.Args[0] != "--count" || t1.Args[1] != "3" {
		t.Errorf("wrong args: %v", t1.Args)
	}
	if _, ok := t1.Stdout.(ContainsMatcher); !ok {
		t.Errorf("expected ContainsMatcher, got %T", t1.Stdout)
	}
	g1 := f.Groups[1]
	if g1.Pattern == nil {
		t.Fatal("expected pattern in group files")
	}
	if g1.Pattern.InputGlob != "testdata/*/input.txt" {
		t.Errorf("wrong input glob: %q", g1.Pattern.InputGlob)
	}
	// Per-group timeout overrides the file-level 10s default.
	if g1.Timeout != 5*time.Second {
		t.Errorf("wrong group timeout: %v", g1.Timeout)
	}
}
// TestWeightWarning verifies that group weights summing to something
// other than 1.0 (0.3 + 0.3 here) produce a warning but not an error.
func TestWeightWarning(t *testing.T) {
	src := `
build "go build ."
group("a") {
weight = 0.3
test("x") { stdin = "" stdout = "" }
}
group("b") {
weight = 0.3
test("y") { stdin = "" stdout = "" }
}
`
	_, warns, err := Parse(src)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(warns) == 0 {
		t.Error("expected weight warning, got none")
	}
	t.Logf("warn: %v", warns)
}
// TestHeredoc verifies `"""` string handling: the blank lines adjacent
// to the quotes are dropped and the common indent stripped, leaving the
// bare multi-line payload without a trailing newline.
func TestHeredoc(t *testing.T) {
	src := `
build "go build ."
group("g") {
weight = 1.0
test("heredoc") {
stdin = """
hello
world
"""
stdout = """
HELLO
WORLD
"""
}
}
`
	f, _, err := Parse(src)
	if err != nil {
		t.Fatalf("parse error: %v", err)
	}
	test := f.Groups[0].Tests[0]
	if *test.Stdin != "hello\nworld" {
		t.Errorf("wrong stdin: %q", *test.Stdin)
	}
	exact := test.Stdout.(ExactMatcher)
	if exact.Value != "HELLO\nWORLD" {
		t.Errorf("wrong stdout: %q", exact.Value)
	}
}
// TestMatchers exercises the remaining matcher forms: `matches` (regex),
// `~ eps of` (numeric tolerance), `anyOrder { ... }`, and `contains`
// applied to stderr together with a non-zero exitCode.
func TestMatchers(t *testing.T) {
	src := `
build "go build ."
group("matchers") {
weight = 1.0
test("regex") {
stdin = "ping\n"
stdout matches "pong.*"
}
test("numeric") {
stdin = "1 3\n"
stdout ~ 0.001 of "0.333"
}
test("any order") {
stdin = "data\n"
stdout anyOrder {
"line1"
"line2"
"line3"
}
}
test("stderr contains") {
args = "--invalid"
exitCode = 1
stderr contains "invalid flag"
}
}
`
	f, _, err := Parse(src)
	if err != nil {
		t.Fatalf("parse error: %v", err)
	}
	tests := f.Groups[0].Tests
	if _, ok := tests[0].Stdout.(RegexMatcher); !ok {
		t.Errorf("test 0: expected RegexMatcher, got %T", tests[0].Stdout)
	}
	num, ok := tests[1].Stdout.(NumericEpsMatcher)
	if !ok {
		t.Errorf("test 1: expected NumericEpsMatcher, got %T", tests[1].Stdout)
	} else if num.Epsilon != 0.001 {
		t.Errorf("test 1: wrong epsilon %v", num.Epsilon)
	}
	if _, ok := tests[2].Stdout.(AnyOrderMatcher); !ok {
		t.Errorf("test 2: expected AnyOrderMatcher, got %T", tests[2].Stdout)
	}
	if _, ok := tests[3].Stderr.(ContainsMatcher); !ok {
		t.Errorf("test 3: expected ContainsMatcher on stderr, got %T", tests[3].Stderr)
	}
	if *tests[3].ExitCode != 1 {
		t.Errorf("test 3: expected exitCode 1, got %v", *tests[3].ExitCode)
	}
}