Add lexer: Tokenize function with full test coverage

Implements lexer.Tokenize(input string) ([]token.Token, error)
- Skips whitespace
- Parses integer and decimal numbers (including leading dot e.g. .5)
- Handles all operators: + - * /
- Handles parentheses: ( )
- Appends EOF token
- Returns error on invalid characters with position info
- 12 unit tests covering: empty, whitespace-only, integers, decimals,
  leading-dot numbers, operators, parens, full expressions, no-space
  expressions, invalid chars, multiple decimals (1.2.3)
diff --git a/lexer/lexer.go b/lexer/lexer.go
new file mode 100644
index 0000000..0914b72
--- /dev/null
+++ b/lexer/lexer.go
@@ -0,0 +1,83 @@
+package lexer
+
+import (
+	"fmt"
+	"matheval/token"
+)
+
+// Tokenize converts an input string into a slice of tokens.
+// Returns an error if the input contains invalid characters.
+func Tokenize(input string) ([]token.Token, error) {
+	var tokens []token.Token
+	i := 0
+
+	for i < len(input) {
+		ch := input[i]
+
+		// Skip whitespace.
+		if ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' {
+			i++
+			continue
+		}
+
+		// Number: digits and dots.
+		if isDigit(ch) || ch == '.' {
+			start := i
+			hasDot := false
+			for i < len(input) && (isDigit(input[i]) || input[i] == '.') {
+				if input[i] == '.' {
+					if hasDot {
+						break // second dot ends this number
+					}
+					hasDot = true
+				}
+				i++
+			}
+			tokens = append(tokens, token.Token{
+				Type:    token.Number,
+				Literal: input[start:i],
+				Pos:     start,
+			})
+			continue
+		}
+
+		// Single-character tokens.
+		var typ token.Type
+		switch ch {
+		case '+':
+			typ = token.Plus
+		case '-':
+			typ = token.Minus
+		case '*':
+			typ = token.Star
+		case '/':
+			typ = token.Slash
+		case '(':
+			typ = token.LParen
+		case ')':
+			typ = token.RParen
+		default:
+			return nil, fmt.Errorf("unexpected character %q at position %d", string(ch), i)
+		}
+
+		tokens = append(tokens, token.Token{
+			Type:    typ,
+			Literal: string(ch),
+			Pos:     i,
+		})
+		i++
+	}
+
+	// Always append EOF.
+	tokens = append(tokens, token.Token{
+		Type:    token.EOF,
+		Literal: "",
+		Pos:     i,
+	})
+
+	return tokens, nil
+}
+
// isDigit reports whether ch is an ASCII decimal digit ('0'..'9').
func isDigit(ch byte) bool {
	return '0' <= ch && ch <= '9'
}