blob: b16f2edddb89404570bb576a60926ed2a91f9aaa [file] [log] [blame]
package lexer
import (
"matheval/token"
"testing"
)
// TestTokenizeEmpty checks that an empty input produces only an EOF token.
func TestTokenizeEmpty(t *testing.T) {
	got, err := Tokenize("")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(got) != 1 || got[0].Type != token.EOF {
		t.Fatalf("expected single EOF token, got %v", got)
	}
}
// TestTokenizeWhitespaceOnly checks that spaces, tabs, and newlines are
// skipped entirely, leaving only EOF.
func TestTokenizeWhitespaceOnly(t *testing.T) {
	got, err := Tokenize(" \t\n ")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if len(got) != 1 || got[0].Type != token.EOF {
		t.Fatalf("expected single EOF token, got %v", got)
	}
}
// TestTokenizeSingleNumber checks lexing of a lone integer literal.
func TestTokenizeSingleNumber(t *testing.T) {
	got, err := Tokenize("42")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Number, Literal: "42", Pos: 0},
		{Type: token.EOF, Pos: 2},
	}, got)
}
// TestTokenizeDecimalNumber checks lexing of a number with a fractional part.
func TestTokenizeDecimalNumber(t *testing.T) {
	got, err := Tokenize("3.14")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Number, Literal: "3.14", Pos: 0},
		{Type: token.EOF, Pos: 4},
	}, got)
}
// TestTokenizeLeadingDotNumber checks that a number may start with a decimal
// point (".5") without a leading digit.
func TestTokenizeLeadingDotNumber(t *testing.T) {
	got, err := Tokenize(".5")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Number, Literal: ".5", Pos: 0},
		{Type: token.EOF, Pos: 2},
	}, got)
}
// TestTokenizeOperators checks that all four arithmetic operators are
// recognized as individual tokens.
func TestTokenizeOperators(t *testing.T) {
	got, err := Tokenize("+-*/")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.Plus, Literal: "+", Pos: 0},
		{Type: token.Minus, Literal: "-", Pos: 1},
		{Type: token.Star, Literal: "*", Pos: 2},
		{Type: token.Slash, Literal: "/", Pos: 3},
		{Type: token.EOF, Pos: 4},
	}
	assertTokens(t, want, got)
}
// TestTokenizeParens checks lexing of opening and closing parentheses.
func TestTokenizeParens(t *testing.T) {
	got, err := Tokenize("()")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.LParen, Literal: "(", Pos: 0},
		{Type: token.RParen, Literal: ")", Pos: 1},
		{Type: token.EOF, Pos: 2},
	}, got)
}
// TestTokenizeFullExpression checks a parenthesized expression mixing
// integers, a decimal, operators, and interior whitespace; Pos values must
// account for the skipped spaces.
func TestTokenizeFullExpression(t *testing.T) {
	got, err := Tokenize("(1 + 2.5) * 3")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.LParen, Literal: "(", Pos: 0},
		{Type: token.Number, Literal: "1", Pos: 1},
		{Type: token.Plus, Literal: "+", Pos: 3},
		{Type: token.Number, Literal: "2.5", Pos: 5},
		{Type: token.RParen, Literal: ")", Pos: 8},
		{Type: token.Star, Literal: "*", Pos: 10},
		{Type: token.Number, Literal: "3", Pos: 12},
		{Type: token.EOF, Pos: 13},
	}
	assertTokens(t, want, got)
}
// TestTokenizeNoSpaces checks that adjacent tokens need no separating
// whitespace.
func TestTokenizeNoSpaces(t *testing.T) {
	got, err := Tokenize("1+2")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Number, Literal: "1", Pos: 0},
		{Type: token.Plus, Literal: "+", Pos: 1},
		{Type: token.Number, Literal: "2", Pos: 2},
		{Type: token.EOF, Pos: 3},
	}, got)
}
// TestTokenizeInvalidCharacter checks that an unsupported character ('@')
// makes Tokenize return an error.
func TestTokenizeInvalidCharacter(t *testing.T) {
	if _, err := Tokenize("1 + @"); err == nil {
		t.Fatal("expected error for invalid character")
	}
}
// TestTokenizeMultipleInvalidCharacters checks that a variety of unsupported
// characters each cause Tokenize to fail. The previous version, despite its
// name, exercised only a single '&'; a small table covers several rejects.
func TestTokenizeMultipleInvalidCharacters(t *testing.T) {
	for _, input := range []string{"1 & 2", "1 @ 2", "#", "a $ b"} {
		if _, err := Tokenize(input); err == nil {
			t.Errorf("Tokenize(%q): expected error for invalid character", input)
		}
	}
}
// TestTokenizeMultipleDecimals checks the lexer's greedy number scan:
// "1.2.3" splits into the number "1.2" followed by the number ".3".
func TestTokenizeMultipleDecimals(t *testing.T) {
	got, err := Tokenize("1.2.3")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Number, Literal: "1.2", Pos: 0},
		{Type: token.Number, Literal: ".3", Pos: 3},
		{Type: token.EOF, Pos: 5},
	}, got)
}
// TestTokenizeSingleIdent checks lexing of a one-letter identifier.
func TestTokenizeSingleIdent(t *testing.T) {
	got, err := Tokenize("x")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Ident, Literal: "x", Pos: 0},
		{Type: token.EOF, Pos: 1},
	}, got)
}
// TestTokenizeMultiCharIdent checks that consecutive letters form a single
// identifier token.
func TestTokenizeMultiCharIdent(t *testing.T) {
	got, err := Tokenize("foo")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Ident, Literal: "foo", Pos: 0},
		{Type: token.EOF, Pos: 3},
	}, got)
}
// TestTokenizeIdentWithDigits checks that digits following a letter stay
// part of the identifier ("x2" is one token, not ident + number).
func TestTokenizeIdentWithDigits(t *testing.T) {
	got, err := Tokenize("x2")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Ident, Literal: "x2", Pos: 0},
		{Type: token.EOF, Pos: 2},
	}, got)
}
// TestTokenizeIdentWithUnderscore checks that underscores are valid inside
// identifiers.
func TestTokenizeIdentWithUnderscore(t *testing.T) {
	got, err := Tokenize("my_func")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Ident, Literal: "my_func", Pos: 0},
		{Type: token.EOF, Pos: 7},
	}, got)
}
// TestTokenizeComma checks lexing of a lone comma.
func TestTokenizeComma(t *testing.T) {
	got, err := Tokenize(",")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Comma, Literal: ",", Pos: 0},
		{Type: token.EOF, Pos: 1},
	}, got)
}
// TestTokenizeEquals checks lexing of a lone equals sign.
func TestTokenizeEquals(t *testing.T) {
	got, err := Tokenize("=")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	assertTokens(t, []token.Token{
		{Type: token.Equals, Literal: "=", Pos: 0},
		{Type: token.EOF, Pos: 1},
	}, got)
}
// TestTokenizeFuncDefinition checks the token stream for a single-parameter
// function definition, including positions across the skipped spaces.
func TestTokenizeFuncDefinition(t *testing.T) {
	got, err := Tokenize("f(x) = x + 1")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.Ident, Literal: "f", Pos: 0},
		{Type: token.LParen, Literal: "(", Pos: 1},
		{Type: token.Ident, Literal: "x", Pos: 2},
		{Type: token.RParen, Literal: ")", Pos: 3},
		{Type: token.Equals, Literal: "=", Pos: 5},
		{Type: token.Ident, Literal: "x", Pos: 7},
		{Type: token.Plus, Literal: "+", Pos: 9},
		{Type: token.Number, Literal: "1", Pos: 11},
		{Type: token.EOF, Pos: 12},
	}
	assertTokens(t, want, got)
}
// TestTokenizeFuncCallWithArgs checks a two-argument call, exercising the
// comma separator between number arguments.
func TestTokenizeFuncCallWithArgs(t *testing.T) {
	got, err := Tokenize("f(1, 2)")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.Ident, Literal: "f", Pos: 0},
		{Type: token.LParen, Literal: "(", Pos: 1},
		{Type: token.Number, Literal: "1", Pos: 2},
		{Type: token.Comma, Literal: ",", Pos: 3},
		{Type: token.Number, Literal: "2", Pos: 5},
		{Type: token.RParen, Literal: ")", Pos: 6},
		{Type: token.EOF, Pos: 7},
	}
	assertTokens(t, want, got)
}
// TestTokenizeMultiParamFuncDef checks a definition with a multi-character
// name and two parameters separated by a comma.
func TestTokenizeMultiParamFuncDef(t *testing.T) {
	got, err := Tokenize("add(x, y) = x + y")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.Ident, Literal: "add", Pos: 0},
		{Type: token.LParen, Literal: "(", Pos: 3},
		{Type: token.Ident, Literal: "x", Pos: 4},
		{Type: token.Comma, Literal: ",", Pos: 5},
		{Type: token.Ident, Literal: "y", Pos: 7},
		{Type: token.RParen, Literal: ")", Pos: 8},
		{Type: token.Equals, Literal: "=", Pos: 10},
		{Type: token.Ident, Literal: "x", Pos: 12},
		{Type: token.Plus, Literal: "+", Pos: 14},
		{Type: token.Ident, Literal: "y", Pos: 16},
		{Type: token.EOF, Pos: 17},
	}
	assertTokens(t, want, got)
}
// TestTokenizeFuncCallInExpression checks a call whose arguments are
// themselves expressions, embedded in a larger arithmetic expression.
func TestTokenizeFuncCallInExpression(t *testing.T) {
	got, err := Tokenize("f(1+2, 3*4) + 5")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	want := []token.Token{
		{Type: token.Ident, Literal: "f", Pos: 0},
		{Type: token.LParen, Literal: "(", Pos: 1},
		{Type: token.Number, Literal: "1", Pos: 2},
		{Type: token.Plus, Literal: "+", Pos: 3},
		{Type: token.Number, Literal: "2", Pos: 4},
		{Type: token.Comma, Literal: ",", Pos: 5},
		{Type: token.Number, Literal: "3", Pos: 7},
		{Type: token.Star, Literal: "*", Pos: 8},
		{Type: token.Number, Literal: "4", Pos: 9},
		{Type: token.RParen, Literal: ")", Pos: 10},
		{Type: token.Plus, Literal: "+", Pos: 12},
		{Type: token.Number, Literal: "5", Pos: 14},
		{Type: token.EOF, Pos: 15},
	}
	assertTokens(t, want, got)
}
// assertTokens is a test helper that compares two token slices. A length
// mismatch is fatal (element comparison would be meaningless); otherwise each
// field of each token is checked individually so a failure pinpoints the
// exact index and field.
func assertTokens(t *testing.T, want, got []token.Token) {
	t.Helper()
	if len(want) != len(got) {
		t.Fatalf("token count: want %d, got %d\nwant: %v\ngot: %v", len(want), len(got), want, got)
	}
	for i, w := range want {
		g := got[i]
		if w.Type != g.Type {
			t.Errorf("token[%d].Type: want %v, got %v", i, w.Type, g.Type)
		}
		if w.Literal != g.Literal {
			t.Errorf("token[%d].Literal: want %q, got %q", i, w.Literal, g.Literal)
		}
		if w.Pos != g.Pos {
			t.Errorf("token[%d].Pos: want %d, got %d", i, w.Pos, g.Pos)
		}
	}
}