lexer: recognize identifiers, comma, and equals tokens
- Add identifier scanning: starts with letter/underscore, continues with
letters/digits/underscores. Produces Ident tokens.
- Add comma and equals to single-char token switch.
- Add isLetter() helper.
- Add 10 new tests covering: single ident, multi-char ident, ident with
digits, ident with underscore, comma, equals, function definition
syntax, function call with args, multi-param func def, func call in
expression.
diff --git a/lexer/lexer.go b/lexer/lexer.go
index 0914b72..d1f55d5 100644
--- a/lexer/lexer.go
+++ b/lexer/lexer.go
@@ -41,6 +41,20 @@
continue
}
+ // Identifier: starts with a letter or underscore, continues with letters, digits, or underscores.
+ if isLetter(ch) {
+ start := i
+ for i < len(input) && (isLetter(input[i]) || isDigit(input[i])) {
+ i++
+ }
+ tokens = append(tokens, token.Token{
+ Type: token.Ident,
+ Literal: input[start:i],
+ Pos: start,
+ })
+ continue
+ }
+
// Single-character tokens.
var typ token.Type
switch ch {
@@ -56,6 +70,10 @@
typ = token.LParen
case ')':
typ = token.RParen
+ case ',':
+ typ = token.Comma
+ case '=':
+ typ = token.Equals
default:
return nil, fmt.Errorf("unexpected character %q at position %d", string(ch), i)
}
@@ -81,3 +99,7 @@
func isDigit(ch byte) bool {
return ch >= '0' && ch <= '9'
}
+
+func isLetter(ch byte) bool {
+ return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '_'
+}
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
index 94a1627..b16f2ed 100644
--- a/lexer/lexer_test.go
+++ b/lexer/lexer_test.go
@@ -149,6 +149,158 @@
assertTokens(t, expect, tokens)
}
+func TestTokenizeSingleIdent(t *testing.T) {
+ tokens, err := Tokenize("x")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "x", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 1},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeMultiCharIdent(t *testing.T) {
+ tokens, err := Tokenize("foo")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "foo", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 3},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeIdentWithDigits(t *testing.T) {
+ tokens, err := Tokenize("x2")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "x2", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 2},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeIdentWithUnderscore(t *testing.T) {
+ tokens, err := Tokenize("my_func")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "my_func", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 7},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeComma(t *testing.T) {
+ tokens, err := Tokenize(",")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Comma, Literal: ",", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 1},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeEquals(t *testing.T) {
+ tokens, err := Tokenize("=")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Equals, Literal: "=", Pos: 0},
+ {Type: token.EOF, Literal: "", Pos: 1},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeFuncDefinition(t *testing.T) {
+ tokens, err := Tokenize("f(x) = x + 1")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "f", Pos: 0},
+ {Type: token.LParen, Literal: "(", Pos: 1},
+ {Type: token.Ident, Literal: "x", Pos: 2},
+ {Type: token.RParen, Literal: ")", Pos: 3},
+ {Type: token.Equals, Literal: "=", Pos: 5},
+ {Type: token.Ident, Literal: "x", Pos: 7},
+ {Type: token.Plus, Literal: "+", Pos: 9},
+ {Type: token.Number, Literal: "1", Pos: 11},
+ {Type: token.EOF, Literal: "", Pos: 12},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeFuncCallWithArgs(t *testing.T) {
+ tokens, err := Tokenize("f(1, 2)")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "f", Pos: 0},
+ {Type: token.LParen, Literal: "(", Pos: 1},
+ {Type: token.Number, Literal: "1", Pos: 2},
+ {Type: token.Comma, Literal: ",", Pos: 3},
+ {Type: token.Number, Literal: "2", Pos: 5},
+ {Type: token.RParen, Literal: ")", Pos: 6},
+ {Type: token.EOF, Literal: "", Pos: 7},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeMultiParamFuncDef(t *testing.T) {
+ tokens, err := Tokenize("add(x, y) = x + y")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "add", Pos: 0},
+ {Type: token.LParen, Literal: "(", Pos: 3},
+ {Type: token.Ident, Literal: "x", Pos: 4},
+ {Type: token.Comma, Literal: ",", Pos: 5},
+ {Type: token.Ident, Literal: "y", Pos: 7},
+ {Type: token.RParen, Literal: ")", Pos: 8},
+ {Type: token.Equals, Literal: "=", Pos: 10},
+ {Type: token.Ident, Literal: "x", Pos: 12},
+ {Type: token.Plus, Literal: "+", Pos: 14},
+ {Type: token.Ident, Literal: "y", Pos: 16},
+ {Type: token.EOF, Literal: "", Pos: 17},
+ }
+ assertTokens(t, expect, tokens)
+}
+
+func TestTokenizeFuncCallInExpression(t *testing.T) {
+ tokens, err := Tokenize("f(1+2, 3*4) + 5")
+ if err != nil {
+ t.Fatalf("unexpected error: %v", err)
+ }
+ expect := []token.Token{
+ {Type: token.Ident, Literal: "f", Pos: 0},
+ {Type: token.LParen, Literal: "(", Pos: 1},
+ {Type: token.Number, Literal: "1", Pos: 2},
+ {Type: token.Plus, Literal: "+", Pos: 3},
+ {Type: token.Number, Literal: "2", Pos: 4},
+ {Type: token.Comma, Literal: ",", Pos: 5},
+ {Type: token.Number, Literal: "3", Pos: 7},
+ {Type: token.Star, Literal: "*", Pos: 8},
+ {Type: token.Number, Literal: "4", Pos: 9},
+ {Type: token.RParen, Literal: ")", Pos: 10},
+ {Type: token.Plus, Literal: "+", Pos: 12},
+ {Type: token.Number, Literal: "5", Pos: 14},
+ {Type: token.EOF, Literal: "", Pos: 15},
+ }
+ assertTokens(t, expect, tokens)
+}
+
// assertTokens is a test helper that compares two token slices.
func assertTokens(t *testing.T, want, got []token.Token) {
t.Helper()