logical conditions, elseif
parent 67d586c789
commit 1bc3357aff

@@ -157,34 +157,87 @@ func (c *compiler) compileExpression(expr parser.Expression) {
 	// Arithmetic expressions
 	case *parser.InfixExpression:
+		// Compile left and right expressions
+		c.compileExpression(e.Left)
+		c.compileExpression(e.Right)
+
+		// Generate the appropriate operation
+		switch e.Operator {
+		case "+":
+			c.emit(types.OpAdd, 0)
+		case "-":
+			c.emit(types.OpSubtract, 0)
+		case "*":
+			c.emit(types.OpMultiply, 0)
+		case "/":
+			c.emit(types.OpDivide, 0)
+		case "==":
+			c.emit(types.OpEqual, 0)
+		case "!=":
+			c.emit(types.OpNotEqual, 0)
+		case "<":
+			c.emit(types.OpLessThan, 0)
+		case ">":
+			c.emit(types.OpGreaterThan, 0)
+		case "<=":
+			c.emit(types.OpLessEqual, 0)
+		case ">=":
+			c.emit(types.OpGreaterEqual, 0)
+		case "and":
+			// Compile left operand
+			c.compileExpression(e.Left)
+
+			// Duplicate to check condition
+			c.emit(types.OpDup, 0)
+
+			// Jump if false (short-circuit)
+			jumpFalsePos := len(c.instructions)
+			c.emit(types.OpJumpIfFalse, 0) // Will backpatch
+
+			// Pop the duplicate since we'll replace it
+			c.emit(types.OpPop, 0)
+
+			// Compile right operand
+			c.compileExpression(e.Right)
+
+			// Jump target for short-circuit
+			endPos := len(c.instructions)
+			c.instructions[jumpFalsePos].Operand = endPos
+
+		case "or":
+			// Compile left operand
+			c.compileExpression(e.Left)
+
+			// Duplicate to check condition
+			c.emit(types.OpDup, 0)
+
+			// Need to check if it's truthy to short-circuit
+			falseJumpPos := len(c.instructions)
+			c.emit(types.OpJumpIfFalse, 0) // Jump to right eval if false
+
+			// If truthy, jump to end
+			trueJumpPos := len(c.instructions)
+			c.emit(types.OpJump, 0) // Jump to end if true
+
+			// Position for false case
+			falsePos := len(c.instructions)
+			c.instructions[falseJumpPos].Operand = falsePos
+
+			// Pop the duplicate since we'll replace it
+			c.emit(types.OpPop, 0)
+
+			// Compile right operand
+			c.compileExpression(e.Right)
+
+			// End position
+			endPos := len(c.instructions)
+			c.instructions[trueJumpPos].Operand = endPos
+
+		default:
+			panic(fmt.Sprintf("Unknown infix operator: %s", e.Operator))
-		// Original infix expression compilation
-		c.compileExpression(e.Left)
-		c.compileExpression(e.Right)
-
-		// Generate the appropriate operation
-		switch e.Operator {
-		case "+":
-			c.emit(types.OpAdd, 0)
-		case "-":
-			c.emit(types.OpSubtract, 0)
-		case "*":
-			c.emit(types.OpMultiply, 0)
-		case "/":
-			c.emit(types.OpDivide, 0)
-		case "==":
-			c.emit(types.OpEqual, 0)
-		case "!=":
-			c.emit(types.OpNotEqual, 0)
-		case "<":
-			c.emit(types.OpLessThan, 0)
-		case ">":
-			c.emit(types.OpGreaterThan, 0)
-		case "<=":
-			c.emit(types.OpLessEqual, 0)
-		case ">=":
-			c.emit(types.OpGreaterEqual, 0)
-		default:
-			panic(fmt.Sprintf("Unknown infix operator: %s", e.Operator))
 		}
 	}
 
 	case *parser.PrefixExpression:
@@ -195,6 +248,8 @@ func (c *compiler) compileExpression(expr parser.Expression) {
 		switch e.Operator {
 		case "-":
 			c.emit(types.OpNegate, 0)
+		case "not":
+			c.emit(types.OpNot, 0)
 		default:
 			panic(fmt.Sprintf("Unknown prefix operator: %s", e.Operator))
 		}
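
The "and" and "or" cases above rely on a record-then-backpatch pattern: emit a jump with a placeholder operand, remember its index, and patch the operand once the target position is known. A minimal, self-contained sketch of that pattern follows; the opcode names and the instruction struct are stand-ins chosen for illustration, not the repository's actual types package.

    package main

    import "fmt"

    // Stand-ins for illustration; the real compiler uses types.Instruction
    // and the opcode constants defined in the repository.
    type opcode string

    type instruction struct {
        Op      opcode
        Operand int
    }

    func main() {
        var ins []instruction
        emit := func(op opcode, operand int) int {
            ins = append(ins, instruction{op, operand})
            return len(ins) - 1
        }

        // Lowering of "left and right", mirroring the "and" case above.
        emit("LOAD_LEFT", 0)                     // placeholder for the left operand's code
        emit("DUP", 0)                           // keep a copy as the result if left is falsy
        jumpFalsePos := emit("JUMP_IF_FALSE", 0) // target not known yet
        emit("POP", 0)                           // discard the copy; right will replace it
        emit("LOAD_RIGHT", 0)                    // placeholder for the right operand's code

        // Backpatch: the short-circuit target is the first instruction
        // after the right operand.
        ins[jumpFalsePos].Operand = len(ins)

        for i, in := range ins {
            fmt.Printf("%02d %-13s %d\n", i, in.Op, in.Operand)
        }
    }

The indices recorded here are the same positions that jumpFalsePos, trueJumpPos, and endPos capture in the diff via len(c.instructions).
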
@@ -33,6 +33,10 @@ const (
 	TokenLessEqual
 	TokenGreaterEqual
 	TokenEnd
+	TokenAnd
+	TokenOr
+	TokenNot
+	TokenElseIf
 )
 
 type Token struct {
@@ -150,12 +154,20 @@ func (l *Lexer) NextToken() Token {
 		tok.Type = TokenThen
 	case "else":
 		tok.Type = TokenElse
+	case "elseif":
+		tok.Type = TokenElseIf
 	case "true":
 		tok.Type = TokenTrue
 	case "false":
 		tok.Type = TokenFalse
 	case "end":
 		tok.Type = TokenEnd
+	case "and":
+		tok.Type = TokenAnd
+	case "or":
+		tok.Type = TokenOr
+	case "not":
+		tok.Type = TokenNot
 	default:
 		tok.Type = TokenIdentifier
 	}
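
The keyword switch above classifies an identifier's text and falls back to TokenIdentifier for anything that is not a reserved word. A rough, self-contained illustration of the same lookup, using plain strings rather than the package's TokenType constants:

    package main

    import "fmt"

    // Illustrative keyword table mirroring the switch above; the real lexer
    // returns lexer.TokenType values, not strings.
    var keywords = map[string]string{
        "then": "TokenThen", "else": "TokenElse", "elseif": "TokenElseIf",
        "true": "TokenTrue", "false": "TokenFalse", "end": "TokenEnd",
        "and": "TokenAnd", "or": "TokenOr", "not": "TokenNot",
    }

    func classify(word string) string {
        if tok, ok := keywords[word]; ok {
            return tok
        }
        return "TokenIdentifier"
    }

    func main() {
        for _, w := range []string{"elseif", "and", "not", "score"} {
            fmt.Printf("%-8s -> %s\n", w, classify(w))
        }
    }

Note that the keyword is the single word "elseif"; writing "else if" with a space would lex as two separate keyword tokens and would not produce TokenElseIf.
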
parser/parser.go
@@ -11,10 +11,14 @@ import (
 const (
 	_ int = iota
 	LOWEST
-	SUM         // +, -
-	PRODUCT     // *, /
-	PREFIX      // -X or !X
-	INDEX       // array[index]
+	LOGICAL_OR  // or
+	LOGICAL_AND // and
+	EQUALITY    // ==, !=
+	COMPARISON  // <, >, <=, >=
+	SUM         // +, -
+	PRODUCT     // *, /
+	PREFIX      // -X or !X
+	INDEX       // array[index]
 )
 
 var precedences = map[lexer.TokenType]int{
@@ -23,12 +27,14 @@ var precedences = map[lexer.TokenType]int{
 	lexer.TokenStar:         PRODUCT,
 	lexer.TokenSlash:        PRODUCT,
 	lexer.TokenLeftBracket:  INDEX,
-	lexer.TokenEqualEqual:   LOWEST + 1,
-	lexer.TokenNotEqual:     LOWEST + 1,
-	lexer.TokenLessThan:     LOWEST + 1,
-	lexer.TokenGreaterThan:  LOWEST + 1,
-	lexer.TokenLessEqual:    LOWEST + 1,
-	lexer.TokenGreaterEqual: LOWEST + 1,
+	lexer.TokenEqualEqual:   EQUALITY,
+	lexer.TokenNotEqual:     EQUALITY,
+	lexer.TokenLessThan:     COMPARISON,
+	lexer.TokenGreaterThan:  COMPARISON,
+	lexer.TokenLessEqual:    COMPARISON,
+	lexer.TokenGreaterEqual: COMPARISON,
+	lexer.TokenAnd:          LOGICAL_AND,
+	lexer.TokenOr:           LOGICAL_OR,
 }
 
 type (
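
With these levels in place, or binds loosest, and binds tighter than or, and both sit below the equality and comparison operators, which in turn sit below the arithmetic ones. An expression such as a + 1 < b and c == d or not e should therefore group as (((a + 1) < b) and (c == d)) or (not e).
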
@@ -67,6 +73,7 @@ func New(l *lexer.Lexer) *Parser {
 	p.registerPrefix(lexer.TokenThen, p.parseUnexpectedToken)
 	p.registerPrefix(lexer.TokenTrue, p.parseBooleanLiteral)
 	p.registerPrefix(lexer.TokenFalse, p.parseBooleanLiteral)
+	p.registerPrefix(lexer.TokenNot, p.parsePrefixExpression)
 
 	// Initialize infix parse functions
 	p.infixParseFns = make(map[lexer.TokenType]infixParseFn)
@@ -75,6 +82,8 @@ func New(l *lexer.Lexer) *Parser {
 	p.registerInfix(lexer.TokenStar, p.parseInfixExpression)
 	p.registerInfix(lexer.TokenSlash, p.parseInfixExpression)
 	p.registerInfix(lexer.TokenLeftBracket, p.parseIndexExpression)
+	p.registerInfix(lexer.TokenAnd, p.parseInfixExpression)
+	p.registerInfix(lexer.TokenOr, p.parseInfixExpression)
 
 	// Register comparison operators
 	p.registerInfix(lexer.TokenEqualEqual, p.parseInfixExpression)
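
Registering TokenAnd and TokenOr with the generic parseInfixExpression, together with their entries in the precedences map above, should be all the Pratt-style expression loop needs to parse and/or chains; the short-circuit behaviour itself is handled later, in the compiler.
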
@@ -469,8 +478,9 @@ func (p *Parser) parseIfExpression() Expression {
 	consequence := &BlockStatement{Token: p.curToken}
 	consequence.Statements = []Statement{}
 
-	// Parse statements until we hit 'else' or 'end'
-	for !p.curTokenIs(lexer.TokenElse) && !p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
+	// Parse statements until we hit 'else', 'elseif', or 'end'
+	for !p.curTokenIs(lexer.TokenElse) && !p.curTokenIs(lexer.TokenElseIf) &&
+		!p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
 		stmt := p.parseStatement()
 		consequence.Statements = append(consequence.Statements, stmt)
 		p.nextToken()
@@ -478,6 +488,23 @@ func (p *Parser) parseIfExpression() Expression {
 
 	expression.Consequence = consequence
 
+	// Check for 'elseif'
+	if p.curTokenIs(lexer.TokenElseIf) {
+		// Create a block statement for the alternative
+		alternative := &BlockStatement{Token: p.curToken}
+		alternative.Statements = []Statement{}
+
+		// Parse the nested elseif as a new if expression
+		nestedIf := p.parseElseIfExpression()
+
+		// Add it as an expression statement in the alternative block
+		alternative.Statements = append(alternative.Statements,
+			&ExpressionStatement{Token: p.curToken, Expression: nestedIf})
+
+		expression.Alternative = alternative
+		return expression
+	}
+
 	// Check for 'else'
 	if p.curTokenIs(lexer.TokenElse) {
 		p.nextToken() // Skip 'else'
@@ -506,6 +533,73 @@ func (p *Parser) parseIfExpression() Expression {
 	return expression
 }
 
+func (p *Parser) parseElseIfExpression() Expression {
+	expression := &IfExpression{Token: p.curToken}
+
+	p.nextToken() // Skip 'elseif'
+
+	// Parse condition
+	expression.Condition = p.parseExpression(LOWEST)
+
+	// Expect 'then' after condition
+	if !p.expectPeek(lexer.TokenThen) {
+		return nil
+	}
+
+	p.nextToken() // Skip 'then'
+
+	// Create a block statement for the consequence
+	consequence := &BlockStatement{Token: p.curToken}
+	consequence.Statements = []Statement{}
+
+	// Parse statements until we hit 'else', 'elseif', or 'end'
+	for !p.curTokenIs(lexer.TokenElse) && !p.curTokenIs(lexer.TokenElseIf) &&
+		!p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
+		stmt := p.parseStatement()
+		consequence.Statements = append(consequence.Statements, stmt)
+		p.nextToken()
+	}
+
+	expression.Consequence = consequence
+
+	// Handle nested elseif
+	if p.curTokenIs(lexer.TokenElseIf) {
+		// Create a block statement for the alternative
+		alternative := &BlockStatement{Token: p.curToken}
+		alternative.Statements = []Statement{}
+
+		// Parse the nested elseif recursively
+		nestedIf := p.parseElseIfExpression()
+
+		// Add it as an expression statement in the alternative block
+		alternative.Statements = append(alternative.Statements,
+			&ExpressionStatement{Token: p.curToken, Expression: nestedIf})
+
+		expression.Alternative = alternative
+		return expression
+	}
+
+	// Handle else
+	if p.curTokenIs(lexer.TokenElse) {
+		p.nextToken() // Skip 'else'
+
+		// Create a block statement for the alternative
+		alternative := &BlockStatement{Token: p.curToken}
+		alternative.Statements = []Statement{}
+
+		// Parse statements until we hit 'end'
+		for !p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
+			stmt := p.parseStatement()
+			alternative.Statements = append(alternative.Statements, stmt)
+			p.nextToken()
+		}
+
+		expression.Alternative = alternative
+	}
+
+	return expression
+}
+
 func (p *Parser) parseErrorToken() Expression {
 	msg := fmt.Sprintf("unexpected token: %s", p.curToken.Value)
 	p.errors = append(p.errors, msg)
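
Structurally, each elseif is parsed as a nested if: the enclosing IfExpression's Alternative block holds a single ExpressionStatement whose Expression is another IfExpression, and the chain recurses until a plain else or end is reached. For a source chain like "if a then ... elseif b then ... else ... end", the resulting AST should therefore have roughly this shape (field names as in the diff, conditions and statements elided):

    IfExpression{
        Condition:   <a>,
        Consequence: <then-block>,
        Alternative: BlockStatement{Statements: []Statement{
            ExpressionStatement{Expression: IfExpression{
                Condition:   <b>,
                Consequence: <elseif-block>,
                Alternative: <else-block>,
            }},
        }},
    }

Since any code that walks an IfExpression only ever sees a two-way if/else, this layout is presumably why the compiler needed no elseif-specific changes in this commit.
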
@@ -39,6 +39,7 @@ const (
 	OpGreaterThan
 	OpLessEqual
 	OpGreaterEqual
+	OpNot
 )
 
 type Instruction struct {
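
OpNot is the only new opcode; the short-circuit lowering for and/or in the compiler reuses OpDup, OpJumpIfFalse, OpJump, and OpPop, which are not added here and so presumably already exist in this const block.
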
vm/vm.go
@@ -409,6 +409,22 @@ func (vm *VM) Run(bytecode *types.Bytecode) {
 				fmt.Println("Error: cannot compare non-number values with >=")
 				vm.push(types.NewBoolean(false))
 			}
 
+		case types.OpNot:
+			operand := vm.pop()
+
+			// Consider falsy: false, null, 0
+			isFalsy := false
+
+			if operand.Type == types.TypeBoolean && !operand.Data.(bool) {
+				isFalsy = true
+			} else if operand.Type == types.TypeNull {
+				isFalsy = true
+			} else if operand.Type == types.TypeNumber && operand.Data.(float64) == 0 {
+				isFalsy = true
+			}
+
+			vm.push(types.NewBoolean(!isFalsy))
 		}
 	}
 }
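
Under this rule only boolean false, null, and the number 0 are falsy; everything else, including the empty string, is truthy. A small self-contained sketch of the same check; the value struct below is an illustrative stand-in for the repository's types package, not its real API:

    package main

    import "fmt"

    // value stands in for the VM's runtime values; only the fields needed
    // for the falsiness check are modelled here.
    type value struct {
        kind string // "boolean", "null", "number", "string", ...
        data interface{}
    }

    func isFalsy(v value) bool {
        switch v.kind {
        case "boolean":
            return !v.data.(bool)
        case "null":
            return true
        case "number":
            return v.data.(float64) == 0
        default:
            return false
        }
    }

    func main() {
        samples := []value{
            {"boolean", false},
            {"null", nil},
            {"number", 0.0},
            {"number", 3.5},
            {"string", ""},
        }
        for _, v := range samples {
            // OpNot would push the negation of the operand's truthiness.
            fmt.Printf("not %s(%v) evaluates to %v\n", v.kind, v.data, isFalsy(v))
        }
    }
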