From 67d586c789f4688a1f9bb3ef0d42dad33073475b Mon Sep 17 00:00:00 2001
From: Sky Johnson
Date: Tue, 6 May 2025 15:24:49 -0500
Subject: [PATCH] fix if blocks, remove braces for blocks

---
 compiler/compiler.go |  3 +-
 parser/parser.go     | 75 +++++++++++++++++++++-----------------
 2 files changed, 37 insertions(+), 41 deletions(-)

diff --git a/compiler/compiler.go b/compiler/compiler.go
index 3037ceb..36eff0d 100644
--- a/compiler/compiler.go
+++ b/compiler/compiler.go
@@ -91,12 +91,11 @@ func (c *compiler) compileStatement(stmt parser.Statement) {
 		c.compileExpression(s.Value)
 		c.emit(types.OpEcho, 0)
 
+	// BlockStatement now should only be used for keyword blocks like if-then-else-end
 	case *parser.BlockStatement:
-		c.enterScope()
 		for _, blockStmt := range s.Statements {
 			c.compileStatement(blockStmt)
 		}
-		c.exitScope()
 
 	case *parser.ExpressionStatement:
 		c.compileExpression(s.Expression)
diff --git a/parser/parser.go b/parser/parser.go
index 9b1e1eb..9f304a6 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -62,8 +62,9 @@ func New(l *lexer.Lexer) *Parser {
 	p.registerPrefix(lexer.TokenMinus, p.parsePrefixExpression)
 	p.registerPrefix(lexer.TokenLeftParen, p.parseGroupedExpression)
 	p.registerPrefix(lexer.TokenIf, p.parseIfExpression)
-	p.registerPrefix(lexer.TokenElse, p.parseErrorToken)
-	p.registerPrefix(lexer.TokenEnd, p.parseErrorToken)
+	p.registerPrefix(lexer.TokenElse, p.parseUnexpectedToken)
+	p.registerPrefix(lexer.TokenEnd, p.parseUnexpectedToken)
+	p.registerPrefix(lexer.TokenThen, p.parseUnexpectedToken)
 	p.registerPrefix(lexer.TokenTrue, p.parseBooleanLiteral)
 	p.registerPrefix(lexer.TokenFalse, p.parseBooleanLiteral)
 
@@ -167,8 +168,6 @@ func (p *Parser) parseStatement() Statement {
 		return p.parseExpressionStatement()
 	case lexer.TokenEcho:
 		return p.parseEchoStatement()
-	case lexer.TokenLeftBrace:
-		return p.parseBlockStatement()
 	default:
 		return p.parseExpressionStatement()
 	}
@@ -337,7 +336,7 @@ func (p *Parser) parseNumberLiteral() Expression {
 
 func (p *Parser) parseTableLiteral() Expression {
 	table := &TableLiteral{
-		Token: p.curToken,
+		Token: p.curToken, // This should be '{'
 		Pairs: make(map[Expression]Expression),
 	}
 
@@ -461,54 +460,46 @@ func (p *Parser) parseIfExpression() Expression {
 
 	// Expect 'then' after condition
 	if !p.expectPeek(lexer.TokenThen) {
-		p.errors = append(p.errors, "expected 'then' after if condition")
 		return nil
 	}
 
 	p.nextToken() // Skip 'then'
 
-	// Parse consequence (then block)
-	if p.curTokenIs(lexer.TokenLeftBrace) {
-		expression.Consequence = p.parseBlockStatement()
-	} else {
-		// For single statement without braces
-		stmt := &BlockStatement{Token: p.curToken}
-		stmt.Statements = []Statement{p.parseStatement()}
-		expression.Consequence = stmt
+	// Create a block statement for the consequence
+	consequence := &BlockStatement{Token: p.curToken}
+	consequence.Statements = []Statement{}
+
+	// Parse statements until we hit 'else' or 'end'
+	for !p.curTokenIs(lexer.TokenElse) && !p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
+		stmt := p.parseStatement()
+		consequence.Statements = append(consequence.Statements, stmt)
+		p.nextToken()
 	}
 
+	expression.Consequence = consequence
+
 	// Check for 'else'
-	if p.peekTokenIs(lexer.TokenElse) {
-		p.nextToken() // Move to 'else'
+	if p.curTokenIs(lexer.TokenElse) {
 		p.nextToken() // Skip 'else'
 
-		// Parse alternative (else block)
-		if p.curTokenIs(lexer.TokenLeftBrace) {
-			expression.Alternative = p.parseBlockStatement()
-		} else {
-			// For single statement without braces
-			stmt := &BlockStatement{Token: p.curToken}
-			stmt.Statements = []Statement{p.parseStatement()}
-			expression.Alternative = stmt
-		}
-	}
+		// Create a block statement for the alternative
+		alternative := &BlockStatement{Token: p.curToken}
+		alternative.Statements = []Statement{}
 
-	// Check for 'end'
-	if p.peekTokenIs(lexer.TokenEnd) {
-		p.nextToken() // Consume 'end'
-	} else if !p.curTokenIs(lexer.TokenRightBrace) {
-		// Missing 'end'
-		p.errors = append(p.errors, fmt.Sprintf("line %d: expected 'end' to close if expression",
-			p.curToken.Line))
-
-		// Skip tokens until we find something that could be a valid statement start
-		for !p.curTokenIs(lexer.TokenEOF) &&
-			!p.curTokenIs(lexer.TokenSemicolon) &&
-			!p.curTokenIs(lexer.TokenIdentifier) &&
-			!p.curTokenIs(lexer.TokenEcho) {
+		// Parse statements until we hit 'end'
+		for !p.curTokenIs(lexer.TokenEnd) && !p.curTokenIs(lexer.TokenEOF) {
+			stmt := p.parseStatement()
+			alternative.Statements = append(alternative.Statements, stmt)
 			p.nextToken()
 		}
+
+		expression.Alternative = alternative
+	}
+
+	// We should now be at the 'end' token
+	if !p.curTokenIs(lexer.TokenEnd) {
+		p.errors = append(p.errors, fmt.Sprintf("line %d: expected 'end' to close if expression",
+			p.curToken.Line))
 		return nil
 	}
 
@@ -520,3 +511,9 @@ func (p *Parser) parseErrorToken() Expression {
 	p.errors = append(p.errors, msg)
 	return nil
 }
+
+func (p *Parser) parseUnexpectedToken() Expression {
+	p.errors = append(p.errors, fmt.Sprintf("line %d: unexpected token: %s",
+		p.curToken.Line, p.curToken.Value))
+	return nil
+}
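
For context, a rough sketch of the keyword-delimited if blocks the parser now scans
for. This is inferred only from the token names in the diff (TokenIf, TokenThen,
TokenElse, TokenEnd, TokenEcho); the exact surface grammar is an assumption, not
something this patch states:

    if x then
        echo 1
    else
        echo 2
    end

The then/else bodies are collected statement by statement until 'else' or 'end'
rather than requiring a brace-delimited block, so '{' is left to table literals only.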