|
@@ -8,6 +8,18 @@ import (
|
|
|
"code.osinet.fr/fgm/waiig15/token"
|
|
|
)
|
|
|
|
|
|
+// Precedence constants.
|
|
|
+const (
|
|
|
+ _ int = iota
|
|
|
+ LOWEST
|
|
|
+ EQUALS // ==
|
|
|
+ LESSGREATER // > or <
|
|
|
+ SUM // +
|
|
|
+ PRODUCT // *
|
|
|
+ PREFIX // -X or !X
|
|
|
+ CALL // myFunction(X)
|
|
|
+)
|
|
|
+
|
|
|
// Parser implements the parsing mechanism top-level layer.
|
|
|
type Parser struct {
|
|
|
errors []string
|
|
@@ -15,8 +27,16 @@ type Parser struct {
|
|
|
|
|
|
curToken token.Token
|
|
|
peekToken token.Token
|
|
|
+
|
|
|
+ prefixParseFns map[token.TokenType]prefixParseFn
|
|
|
+ infixParseFns map[token.TokenType]infixParseFn
|
|
|
}
|
|
|
|
|
|
+type (
|
|
|
+ prefixParseFn func() ast.Expression
|
|
|
+ infixParseFn func(ast.Expression) ast.Expression
|
|
|
+)
|
|
|
+
|
|
|
// New returns a new Parser instance with the first two parser tokens already
|
|
|
// loaded.
|
|
|
func New(l *lexer.Lexer) *Parser {
|
|
@@ -25,6 +45,9 @@ func New(l *lexer.Lexer) *Parser {
|
|
|
errors: []string{},
|
|
|
}
|
|
|
|
|
|
+ p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
|
|
|
+ p.registerPrefix(token.IDENT, p.parseIdentifier)
|
|
|
+
|
|
|
 	// Read two tokens, so curToken and peekToken are both set.
|
|
|
p.nextToken()
|
|
|
p.nextToken()
|
|
@@ -83,7 +106,7 @@ func (p *Parser) parseStatement() ast.Statement {
|
|
|
case token.RETURN:
|
|
|
return p.parseReturnStatement()
|
|
|
default:
|
|
|
- return nil
|
|
|
+ return p.parseExpressionStatement()
|
|
|
}
|
|
|
}
|
|
|
|