Browse Source

§2.6 Parsing identifiers.

Frederic G. MARAND 5 years ago
parent
commit
56158a4b80

+ 24 - 1
parser/parser.go

@@ -8,6 +8,18 @@ import (
 	"code.osinet.fr/fgm/waiig15/token"
 )
 
+// Precedence constants.
+const (
+	_ int = iota
+	LOWEST
+	EQUALS      // ==
+	LESSGREATER // > or <
+	SUM         // +
+	PRODUCT     // *
+	PREFIX      // -X or !X
+	CALL        // myFunction(X)
+)
+
 // Parser implements the parsing mechanism top-level layer.
 type Parser struct {
 	errors []string
@@ -15,8 +27,16 @@ type Parser struct {
 
 	curToken  token.Token
 	peekToken token.Token
+
+	prefixParseFns map[token.TokenType]prefixParseFn
+	infixParseFns  map[token.TokenType]infixParseFn
 }
 
+type (
+	prefixParseFn func() ast.Expression
+	infixParseFn  func(ast.Expression) ast.Expression
+)
+
 // New returns a new Parser instance with the first two parser tokens already
 // loaded.
 func New(l *lexer.Lexer) *Parser {
@@ -25,6 +45,9 @@ func New(l *lexer.Lexer) *Parser {
 		errors: []string{},
 	}
 
+	p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
+	p.registerPrefix(token.IDENT, p.parseIdentifier)
+
 	// Read two tokens, so curToken and peekToken are both set.
 	p.nextToken()
 	p.nextToken()
@@ -83,7 +106,7 @@ func (p *Parser) parseStatement() ast.Statement {
 	case token.RETURN:
 		return p.parseReturnStatement()
 	default:
-		return nil
+		return p.parseExpressionStatement()
 	}
 }
 

+ 33 - 0
parser/parser_expression.go

@@ -0,0 +1,33 @@
+package parser
+
+import (
+	"code.osinet.fr/fgm/waiig15/ast"
+	"code.osinet.fr/fgm/waiig15/token"
+)
+
+func (p *Parser) parseExpression(precedent int) ast.Expression {
+	prefix := p.prefixParseFns[p.curToken.Type]
+	if prefix == nil {
+		return nil
+	}
+
+	leftExp := prefix()
+
+
+	return leftExp
+}
+
+func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
+	stmt := &ast.ExpressionStatement{
+		Token: p.curToken,
+	}
+
+	stmt.Expression = p.parseExpression(LOWEST)
+
+	// Semicolons are optional, to make REPL input easier.
+	if p.peekTokenIs(token.SEMICOLON) {
+		p.nextToken()
+	}
+
+	return stmt
+}

+ 12 - 0
parser/parser_identifier.go

@@ -0,0 +1,12 @@
+package parser
+
+import "code.osinet.fr/fgm/waiig15/ast"
+
+// parseIdentifier does not advance the tokens by calling nextToken;
+// this is important.
+func (p *Parser) parseIdentifier() ast.Expression {
+	return &ast.Identifier{
+		Token: p.curToken,
+		Value: p.curToken.Literal,
+	}
+}

+ 0 - 1
parser/parser_let.go

@@ -35,4 +35,3 @@ func (p *Parser) parseLetStatement() *ast.LetStatement {
 
 	return stmt
 }
-

+ 8 - 0
parser/parser_return.go

@@ -22,3 +22,11 @@ func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
 	return stmt
 }
 
+func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
+	p.infixParseFns[tokenType] = fn
+}
+
+func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
+	p.prefixParseFns[tokenType] = fn
+}
+

+ 42 - 0
parser/parset_identifier_test.go

@@ -0,0 +1,42 @@
+package parser
+
+import (
+		"testing"
+	"code.osinet.fr/fgm/waiig15/lexer"
+	"code.osinet.fr/fgm/waiig15/ast"
+)
+
+func TestIdentifierExpression(t *testing.T) {
+	const input = "foobar"
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParserErrors(t, p)
+
+	if len(program.Statements) != 1 {
+		t.Fatalf("program has not enough statements. got=%d",
+			len(program.Statements))
+	}
+
+	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. Got=%T",
+			program.Statements)
+	}
+
+	ident, ok := stmt.Expression.(*ast.Identifier)
+	if !ok {
+		t.Fatalf("exp not *ast.Identifier. got=%T", stmt.Expression)
+	}
+
+	// Why not use input instead of inline strings?
+	if ident.Value != input {
+		t.Errorf("ident.Value not %s. got=%s", input,
+			ident.Value)
+	}
+	if ident.TokenLiteral() != input {
+		t.Errorf("ident.TokenLiteral not %s. got=%s", input,
+			ident.TokenLiteral())
+	}
+}