// Mako/tests/lexer_test.go
// 2025-05-06 15:55:55 -05:00
// 238 lines
// 5.5 KiB
// Go
package tests
import (
"testing"
assert "git.sharkk.net/Go/Assert"
"git.sharkk.net/Sharkk/Mako/lexer"
)
// TestLexerSimpleTokens verifies that single-character operators,
// delimiters, literals, and basic keywords each produce a token with
// the correct type and value, terminated by EOF.
func TestLexerSimpleTokens(t *testing.T) {
	input := `= + - * / ( ) { } [ ] , ; "hello" 123 if then else end true false`
	lex := lexer.New(input)

	// Each case is one token the lexer must emit, in order.
	type tokenCase struct {
		typ   lexer.TokenType
		value string
	}
	cases := []tokenCase{
		{lexer.TokenEqual, "="},
		{lexer.TokenPlus, "+"},
		{lexer.TokenMinus, "-"},
		{lexer.TokenStar, "*"},
		{lexer.TokenSlash, "/"},
		{lexer.TokenLeftParen, "("},
		{lexer.TokenRightParen, ")"},
		{lexer.TokenLeftBrace, "{"},
		{lexer.TokenRightBrace, "}"},
		{lexer.TokenLeftBracket, "["},
		{lexer.TokenRightBracket, "]"},
		{lexer.TokenComma, ","},
		{lexer.TokenSemicolon, ";"},
		{lexer.TokenString, "hello"},
		{lexer.TokenNumber, "123"},
		{lexer.TokenIf, "if"},
		{lexer.TokenThen, "then"},
		{lexer.TokenElse, "else"},
		{lexer.TokenEnd, "end"},
		{lexer.TokenTrue, "true"},
		{lexer.TokenFalse, "false"},
		{lexer.TokenEOF, ""},
	}
	for _, c := range cases {
		tok := lex.NextToken()
		assert.Equal(t, c.typ, tok.Type)
		assert.Equal(t, c.value, tok.Value)
	}
}
// TestLexerCompoundTokens verifies two-character operators and the
// word-form logical operators, terminated by EOF.
func TestLexerCompoundTokens(t *testing.T) {
	input := `== != < > <= >= and or not elseif`
	lex := lexer.New(input)

	type tokenCase struct {
		typ   lexer.TokenType
		value string
	}
	cases := []tokenCase{
		{lexer.TokenEqualEqual, "=="},
		{lexer.TokenNotEqual, "!="},
		{lexer.TokenLessThan, "<"},
		{lexer.TokenGreaterThan, ">"},
		{lexer.TokenLessEqual, "<="},
		{lexer.TokenGreaterEqual, ">="},
		{lexer.TokenAnd, "and"},
		{lexer.TokenOr, "or"},
		{lexer.TokenNot, "not"},
		{lexer.TokenElseIf, "elseif"},
		{lexer.TokenEOF, ""},
	}
	for _, c := range cases {
		tok := lex.NextToken()
		assert.Equal(t, c.typ, tok.Type)
		assert.Equal(t, c.value, tok.Value)
	}
}
// TestLexerIdentifiersAndKeywords verifies that reserved words are
// classified as keywords while everything else (including names with
// underscores, digits, and uppercase letters) becomes an identifier.
func TestLexerIdentifiersAndKeywords(t *testing.T) {
	input := `variable echo if then else end true false and or not x y_1 _var UPPERCASE`
	lex := lexer.New(input)

	type tokenCase struct {
		typ   lexer.TokenType
		value string
	}
	cases := []tokenCase{
		{lexer.TokenIdentifier, "variable"},
		{lexer.TokenEcho, "echo"},
		{lexer.TokenIf, "if"},
		{lexer.TokenThen, "then"},
		{lexer.TokenElse, "else"},
		{lexer.TokenEnd, "end"},
		{lexer.TokenTrue, "true"},
		{lexer.TokenFalse, "false"},
		{lexer.TokenAnd, "and"},
		{lexer.TokenOr, "or"},
		{lexer.TokenNot, "not"},
		{lexer.TokenIdentifier, "x"},
		{lexer.TokenIdentifier, "y_1"},
		{lexer.TokenIdentifier, "_var"},
		{lexer.TokenIdentifier, "UPPERCASE"},
		{lexer.TokenEOF, ""},
	}
	for _, c := range cases {
		tok := lex.NextToken()
		assert.Equal(t, c.typ, tok.Type)
		assert.Equal(t, c.value, tok.Value)
	}
}
// TestLexerNumbers verifies integer literals of varying lengths,
// including zero, followed by EOF.
func TestLexerNumbers(t *testing.T) {
	input := `0 123 999999`
	lex := lexer.New(input)

	// All literals share one token type, so only the values vary.
	for _, want := range []string{"0", "123", "999999"} {
		tok := lex.NextToken()
		assert.Equal(t, lexer.TokenNumber, tok.Type)
		assert.Equal(t, want, tok.Value)
	}

	tok := lex.NextToken()
	assert.Equal(t, lexer.TokenEOF, tok.Type)
	assert.Equal(t, "", tok.Value)
}
// TestLexerStrings verifies string literals: empty, single-word,
// multi-word, and punctuation-heavy. Token values carry the contents
// without the surrounding quotes.
func TestLexerStrings(t *testing.T) {
	input := `"" "hello" "one two three" "special chars: !@#$%^&*()"`
	lex := lexer.New(input)

	// All literals share one token type, so only the values vary.
	for _, want := range []string{"", "hello", "one two three", "special chars: !@#$%^&*()"} {
		tok := lex.NextToken()
		assert.Equal(t, lexer.TokenString, tok.Type)
		assert.Equal(t, want, tok.Value)
	}

	tok := lex.NextToken()
	assert.Equal(t, lexer.TokenEOF, tok.Type)
	assert.Equal(t, "", tok.Value)
}
// TestLexerComments verifies that // line comments are skipped entirely
// and produce no tokens of their own.
func TestLexerComments(t *testing.T) {
	input := `x = 5 // This is a comment
y = 10 // Another comment`
	lex := lexer.New(input)

	type tokenCase struct {
		typ   lexer.TokenType
		value string
	}
	cases := []tokenCase{
		{lexer.TokenIdentifier, "x"},
		{lexer.TokenEqual, "="},
		{lexer.TokenNumber, "5"},
		{lexer.TokenIdentifier, "y"},
		{lexer.TokenEqual, "="},
		{lexer.TokenNumber, "10"},
		{lexer.TokenEOF, ""},
	}
	for _, c := range cases {
		tok := lex.NextToken()
		assert.Equal(t, c.typ, tok.Type)
		assert.Equal(t, c.value, tok.Value)
	}
}
// TestLexerCompositeCode spot-checks the token stream for a larger Mako
// snippet rather than asserting every token exhaustively.
func TestLexerCompositeCode(t *testing.T) {
	input := `
// Sample Mako code
x = 10;
y = 20;
if x < y then
echo "x is less than y";
else
echo "x is not less than y";
end
// Table example
table = {
name = "John",
age = 30,
isActive = true
};
echo table["name"];
`
	lex := lexer.New(input)

	// advance discards the next n tokens.
	advance := func(n int) {
		for ; n > 0; n-- {
			lex.NextToken()
		}
	}

	// First statement: x = 10;
	assert.Equal(t, lexer.TokenIdentifier, lex.NextToken().Type) // x
	assert.Equal(t, lexer.TokenEqual, lex.NextToken().Type)      // =
	assert.Equal(t, lexer.TokenNumber, lex.NextToken().Type)     // 10
	assert.Equal(t, lexer.TokenSemicolon, lex.NextToken().Type)  // ;

	// Skip past the y assignment and the if/else block to the table.
	advance(13)
	assert.Equal(t, lexer.TokenIdentifier, lex.NextToken().Type) // table
	assert.Equal(t, lexer.TokenEqual, lex.NextToken().Type)      // =
	assert.Equal(t, lexer.TokenLeftBrace, lex.NextToken().Type)  // {

	// Skip the table body to reach: echo table["name"];
	advance(15)
	tok := lex.NextToken()
	assert.Equal(t, lexer.TokenEcho, tok.Type)
	tok = lex.NextToken()
	assert.Equal(t, lexer.TokenIdentifier, tok.Type)
	assert.Equal(t, "table", tok.Value)
	tok = lex.NextToken()
	assert.Equal(t, lexer.TokenLeftBracket, tok.Type)
	tok = lex.NextToken()
	assert.Equal(t, lexer.TokenString, tok.Type)
	assert.Equal(t, "name", tok.Value)
}