package lexer

import (
	"matheval/token"
	"testing"
)
8func TestTokenizeEmpty(t *testing.T) {
9 tokens, err := Tokenize("")
10 if err != nil {
11 t.Fatalf("unexpected error: %v", err)
12 }
13 if len(tokens) != 1 || tokens[0].Type != token.EOF {
14 t.Fatalf("expected single EOF token, got %v", tokens)
15 }
16}
17
18func TestTokenizeWhitespaceOnly(t *testing.T) {
19 tokens, err := Tokenize(" \t\n ")
20 if err != nil {
21 t.Fatalf("unexpected error: %v", err)
22 }
23 if len(tokens) != 1 || tokens[0].Type != token.EOF {
24 t.Fatalf("expected single EOF token, got %v", tokens)
25 }
26}
27
28func TestTokenizeSingleNumber(t *testing.T) {
29 tokens, err := Tokenize("42")
30 if err != nil {
31 t.Fatalf("unexpected error: %v", err)
32 }
33 expect := []token.Token{
34 {Type: token.Number, Literal: "42", Pos: 0},
35 {Type: token.EOF, Literal: "", Pos: 2},
36 }
37 assertTokens(t, expect, tokens)
38}
39
40func TestTokenizeDecimalNumber(t *testing.T) {
41 tokens, err := Tokenize("3.14")
42 if err != nil {
43 t.Fatalf("unexpected error: %v", err)
44 }
45 expect := []token.Token{
46 {Type: token.Number, Literal: "3.14", Pos: 0},
47 {Type: token.EOF, Literal: "", Pos: 4},
48 }
49 assertTokens(t, expect, tokens)
50}
51
52func TestTokenizeLeadingDotNumber(t *testing.T) {
53 tokens, err := Tokenize(".5")
54 if err != nil {
55 t.Fatalf("unexpected error: %v", err)
56 }
57 expect := []token.Token{
58 {Type: token.Number, Literal: ".5", Pos: 0},
59 {Type: token.EOF, Literal: "", Pos: 2},
60 }
61 assertTokens(t, expect, tokens)
62}
63
64func TestTokenizeOperators(t *testing.T) {
65 tokens, err := Tokenize("+-*/")
66 if err != nil {
67 t.Fatalf("unexpected error: %v", err)
68 }
69 expect := []token.Token{
70 {Type: token.Plus, Literal: "+", Pos: 0},
71 {Type: token.Minus, Literal: "-", Pos: 1},
72 {Type: token.Star, Literal: "*", Pos: 2},
73 {Type: token.Slash, Literal: "/", Pos: 3},
74 {Type: token.EOF, Literal: "", Pos: 4},
75 }
76 assertTokens(t, expect, tokens)
77}
78
79func TestTokenizeParens(t *testing.T) {
80 tokens, err := Tokenize("()")
81 if err != nil {
82 t.Fatalf("unexpected error: %v", err)
83 }
84 expect := []token.Token{
85 {Type: token.LParen, Literal: "(", Pos: 0},
86 {Type: token.RParen, Literal: ")", Pos: 1},
87 {Type: token.EOF, Literal: "", Pos: 2},
88 }
89 assertTokens(t, expect, tokens)
90}
91
92func TestTokenizeFullExpression(t *testing.T) {
93 tokens, err := Tokenize("(1 + 2.5) * 3")
94 if err != nil {
95 t.Fatalf("unexpected error: %v", err)
96 }
97 expect := []token.Token{
98 {Type: token.LParen, Literal: "(", Pos: 0},
99 {Type: token.Number, Literal: "1", Pos: 1},
100 {Type: token.Plus, Literal: "+", Pos: 3},
101 {Type: token.Number, Literal: "2.5", Pos: 5},
102 {Type: token.RParen, Literal: ")", Pos: 8},
103 {Type: token.Star, Literal: "*", Pos: 10},
104 {Type: token.Number, Literal: "3", Pos: 12},
105 {Type: token.EOF, Literal: "", Pos: 13},
106 }
107 assertTokens(t, expect, tokens)
108}
109
110func TestTokenizeNoSpaces(t *testing.T) {
111 tokens, err := Tokenize("1+2")
112 if err != nil {
113 t.Fatalf("unexpected error: %v", err)
114 }
115 expect := []token.Token{
116 {Type: token.Number, Literal: "1", Pos: 0},
117 {Type: token.Plus, Literal: "+", Pos: 1},
118 {Type: token.Number, Literal: "2", Pos: 2},
119 {Type: token.EOF, Literal: "", Pos: 3},
120 }
121 assertTokens(t, expect, tokens)
122}
123
124func TestTokenizeInvalidCharacter(t *testing.T) {
125 _, err := Tokenize("1 + @")
126 if err == nil {
127 t.Fatal("expected error for invalid character")
128 }
129}
130
131func TestTokenizeMultipleInvalidCharacters(t *testing.T) {
132 _, err := Tokenize("1 & 2")
133 if err == nil {
134 t.Fatal("expected error for invalid character")
135 }
136}
137
138func TestTokenizeMultipleDecimals(t *testing.T) {
139 // "1.2.3" — the lexer should read "1.2" as a number, then ".3" as another number
140 tokens, err := Tokenize("1.2.3")
141 if err != nil {
142 t.Fatalf("unexpected error: %v", err)
143 }
144 expect := []token.Token{
145 {Type: token.Number, Literal: "1.2", Pos: 0},
146 {Type: token.Number, Literal: ".3", Pos: 3},
147 {Type: token.EOF, Literal: "", Pos: 5},
148 }
149 assertTokens(t, expect, tokens)
150}
151
Sketch🕴️dc6d8f62026-02-28 20:45:13 +0400152func TestTokenizeSingleIdent(t *testing.T) {
153 tokens, err := Tokenize("x")
154 if err != nil {
155 t.Fatalf("unexpected error: %v", err)
156 }
157 expect := []token.Token{
158 {Type: token.Ident, Literal: "x", Pos: 0},
159 {Type: token.EOF, Literal: "", Pos: 1},
160 }
161 assertTokens(t, expect, tokens)
162}
163
164func TestTokenizeMultiCharIdent(t *testing.T) {
165 tokens, err := Tokenize("foo")
166 if err != nil {
167 t.Fatalf("unexpected error: %v", err)
168 }
169 expect := []token.Token{
170 {Type: token.Ident, Literal: "foo", Pos: 0},
171 {Type: token.EOF, Literal: "", Pos: 3},
172 }
173 assertTokens(t, expect, tokens)
174}
175
176func TestTokenizeIdentWithDigits(t *testing.T) {
177 tokens, err := Tokenize("x2")
178 if err != nil {
179 t.Fatalf("unexpected error: %v", err)
180 }
181 expect := []token.Token{
182 {Type: token.Ident, Literal: "x2", Pos: 0},
183 {Type: token.EOF, Literal: "", Pos: 2},
184 }
185 assertTokens(t, expect, tokens)
186}
187
188func TestTokenizeIdentWithUnderscore(t *testing.T) {
189 tokens, err := Tokenize("my_func")
190 if err != nil {
191 t.Fatalf("unexpected error: %v", err)
192 }
193 expect := []token.Token{
194 {Type: token.Ident, Literal: "my_func", Pos: 0},
195 {Type: token.EOF, Literal: "", Pos: 7},
196 }
197 assertTokens(t, expect, tokens)
198}
199
200func TestTokenizeComma(t *testing.T) {
201 tokens, err := Tokenize(",")
202 if err != nil {
203 t.Fatalf("unexpected error: %v", err)
204 }
205 expect := []token.Token{
206 {Type: token.Comma, Literal: ",", Pos: 0},
207 {Type: token.EOF, Literal: "", Pos: 1},
208 }
209 assertTokens(t, expect, tokens)
210}
211
212func TestTokenizeEquals(t *testing.T) {
213 tokens, err := Tokenize("=")
214 if err != nil {
215 t.Fatalf("unexpected error: %v", err)
216 }
217 expect := []token.Token{
218 {Type: token.Equals, Literal: "=", Pos: 0},
219 {Type: token.EOF, Literal: "", Pos: 1},
220 }
221 assertTokens(t, expect, tokens)
222}
223
224func TestTokenizeFuncDefinition(t *testing.T) {
225 tokens, err := Tokenize("f(x) = x + 1")
226 if err != nil {
227 t.Fatalf("unexpected error: %v", err)
228 }
229 expect := []token.Token{
230 {Type: token.Ident, Literal: "f", Pos: 0},
231 {Type: token.LParen, Literal: "(", Pos: 1},
232 {Type: token.Ident, Literal: "x", Pos: 2},
233 {Type: token.RParen, Literal: ")", Pos: 3},
234 {Type: token.Equals, Literal: "=", Pos: 5},
235 {Type: token.Ident, Literal: "x", Pos: 7},
236 {Type: token.Plus, Literal: "+", Pos: 9},
237 {Type: token.Number, Literal: "1", Pos: 11},
238 {Type: token.EOF, Literal: "", Pos: 12},
239 }
240 assertTokens(t, expect, tokens)
241}
242
243func TestTokenizeFuncCallWithArgs(t *testing.T) {
244 tokens, err := Tokenize("f(1, 2)")
245 if err != nil {
246 t.Fatalf("unexpected error: %v", err)
247 }
248 expect := []token.Token{
249 {Type: token.Ident, Literal: "f", Pos: 0},
250 {Type: token.LParen, Literal: "(", Pos: 1},
251 {Type: token.Number, Literal: "1", Pos: 2},
252 {Type: token.Comma, Literal: ",", Pos: 3},
253 {Type: token.Number, Literal: "2", Pos: 5},
254 {Type: token.RParen, Literal: ")", Pos: 6},
255 {Type: token.EOF, Literal: "", Pos: 7},
256 }
257 assertTokens(t, expect, tokens)
258}
259
260func TestTokenizeMultiParamFuncDef(t *testing.T) {
261 tokens, err := Tokenize("add(x, y) = x + y")
262 if err != nil {
263 t.Fatalf("unexpected error: %v", err)
264 }
265 expect := []token.Token{
266 {Type: token.Ident, Literal: "add", Pos: 0},
267 {Type: token.LParen, Literal: "(", Pos: 3},
268 {Type: token.Ident, Literal: "x", Pos: 4},
269 {Type: token.Comma, Literal: ",", Pos: 5},
270 {Type: token.Ident, Literal: "y", Pos: 7},
271 {Type: token.RParen, Literal: ")", Pos: 8},
272 {Type: token.Equals, Literal: "=", Pos: 10},
273 {Type: token.Ident, Literal: "x", Pos: 12},
274 {Type: token.Plus, Literal: "+", Pos: 14},
275 {Type: token.Ident, Literal: "y", Pos: 16},
276 {Type: token.EOF, Literal: "", Pos: 17},
277 }
278 assertTokens(t, expect, tokens)
279}
280
281func TestTokenizeFuncCallInExpression(t *testing.T) {
282 tokens, err := Tokenize("f(1+2, 3*4) + 5")
283 if err != nil {
284 t.Fatalf("unexpected error: %v", err)
285 }
286 expect := []token.Token{
287 {Type: token.Ident, Literal: "f", Pos: 0},
288 {Type: token.LParen, Literal: "(", Pos: 1},
289 {Type: token.Number, Literal: "1", Pos: 2},
290 {Type: token.Plus, Literal: "+", Pos: 3},
291 {Type: token.Number, Literal: "2", Pos: 4},
292 {Type: token.Comma, Literal: ",", Pos: 5},
293 {Type: token.Number, Literal: "3", Pos: 7},
294 {Type: token.Star, Literal: "*", Pos: 8},
295 {Type: token.Number, Literal: "4", Pos: 9},
296 {Type: token.RParen, Literal: ")", Pos: 10},
297 {Type: token.Plus, Literal: "+", Pos: 12},
298 {Type: token.Number, Literal: "5", Pos: 14},
299 {Type: token.EOF, Literal: "", Pos: 15},
300 }
301 assertTokens(t, expect, tokens)
302}
303
Sketch🕴️cdbb1892026-02-28 19:10:35 +0400304// assertTokens is a test helper that compares two token slices.
305func assertTokens(t *testing.T, want, got []token.Token) {
306 t.Helper()
307 if len(want) != len(got) {
308 t.Fatalf("token count: want %d, got %d\nwant: %v\ngot: %v", len(want), len(got), want, got)
309 }
310 for i := range want {
311 if want[i].Type != got[i].Type {
312 t.Errorf("token[%d].Type: want %v, got %v", i, want[i].Type, got[i].Type)
313 }
314 if want[i].Literal != got[i].Literal {
315 t.Errorf("token[%d].Literal: want %q, got %q", i, want[i].Literal, got[i].Literal)
316 }
317 if want[i].Pos != got[i].Pos {
318 t.Errorf("token[%d].Pos: want %d, got %d", i, want[i].Pos, got[i].Pos)
319 }
320 }
321}