
§2.8: restructured files to match author code more closely.

Frederic G. MARAND, 5 years ago
commit d8072da4f9

+ 181 - 5
ast/ast.go

@@ -2,6 +2,8 @@ package ast
 
 import (
 	"bytes"
+
+	"code.osinet.fr/fgm/waiig15/token"
 )
 
 // Node is the interface implemented by every node in the AST. It extends the
@@ -32,6 +34,16 @@ type Program struct {
 	Statements []Statement
 }
 
+// TokenLiteral returns the string contents of the first statement token, which
+// can be an empty string if the program is empty.
+func (p *Program) TokenLiteral() string {
+	if len(p.Statements) > 0 {
+		return p.Statements[0].TokenLiteral()
+	}
+
+	return ""
+}
+
 // String satisfies the Node and fmt.Stringer interfaces.
 func (p *Program) String() string {
 	var out bytes.Buffer
@@ -43,12 +55,176 @@ func (p *Program) String() string {
 	return out.String()
 }
 
-// TokenLiteral returns the string contents of the first statement token, which
-// can be an empty string if the program is empty.
-func (p *Program) TokenLiteral() string {
-	if len(p.Statements) > 0 {
-		return p.Statements[0].TokenLiteral()
+// LetStatement is the Node type for Let statements.
+type LetStatement struct {
+	Token token.Token // the token.LET token. Why do we need it?
+	Name  *Identifier
+	Value Expression
+}
+
+func (ls *LetStatement) statementNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (ls *LetStatement) TokenLiteral() string {
+	return ls.Token.Literal
+}
+
+// String implements Node and fmt.Stringer.
+func (ls *LetStatement) String() string {
+	var out bytes.Buffer
+
+	out.WriteString(ls.TokenLiteral() + " ")
+	out.WriteString(ls.Name.String())
+	out.WriteString(" = ")
+
+	if ls.Value != nil {
+		out.WriteString(ls.Value.String())
+	}
+
+	out.WriteString(";")
+
+	return out.String()
+}
+
+// ReturnStatement fulfills the Node and Statement interfaces.
+type ReturnStatement struct {
+	Token       token.Token // the token.RETURN token. Why do we need it?
+	Name        *Identifier
+	ReturnValue Expression
+}
+
+func (rs *ReturnStatement) statementNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (rs *ReturnStatement) TokenLiteral() string {
+	return rs.Token.Literal
+}
+
+// String satisfies the Node and fmt.Stringer interfaces.
+func (rs *ReturnStatement) String() string {
+	var out bytes.Buffer
+
+	out.WriteString(rs.TokenLiteral() + " ")
+
+	if rs.ReturnValue != nil {
+		out.WriteString(rs.ReturnValue.String())
 	}
 
+	out.WriteString(";")
+
+	return out.String()
+}
+
+// ExpressionStatement fulfills the Node and Statement interfaces.
+// It represents a statement made of a bare expression like:
+//   x + 10;
+type ExpressionStatement struct {
+	Token      token.Token // the first token of the expression
+	Expression Expression
+}
+
+func (es *ExpressionStatement) statementNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (es *ExpressionStatement) TokenLiteral() string {
+	return es.Token.Literal
+}
+
+// String satisfies the Node and fmt.Stringer interfaces.
+func (es *ExpressionStatement) String() string {
+	if es.Expression != nil {
+		return es.Expression.String()
+	}
 	return ""
 }
+
+// Identifier is the Node type for identifiers.
+type Identifier struct {
+	Token token.Token // the token.IDENT token. Why do we need it?
+	Value string      // The identifier string.
+}
+
+func (i *Identifier) expressionNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (i *Identifier) TokenLiteral() string {
+	return i.Token.Literal
+}
+
+func (i *Identifier) String() string {
+	return i.Value
+}
+
+// IntegerLiteral fulfills the Node and Expression interfaces.
+type IntegerLiteral struct {
+	Token token.Token
+	Value int64
+}
+
+func (il *IntegerLiteral) expressionNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (il *IntegerLiteral) TokenLiteral() string {
+	return il.Token.Literal
+}
+func (il *IntegerLiteral) String() string {
+	return il.Token.Literal
+}
+
+// PrefixExpression fulfills the Node and Expression interfaces.
+// It represents a prefixed expression like:
+// "-5;"
+type PrefixExpression struct {
+	Token    token.Token // The prefix token, e.g. !
+	Operator string
+	Right    Expression
+}
+
+func (pe *PrefixExpression) expressionNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (pe *PrefixExpression) TokenLiteral() string {
+	return pe.Token.Literal
+}
+
+// String satisfies the Node and fmt.Stringer interfaces.
+func (pe *PrefixExpression) String() string {
+	var out bytes.Buffer
+
+	out.WriteString("(")
+	out.WriteString(pe.Operator)
+	out.WriteString(pe.Right.String())
+	out.WriteString(")")
+
+	return out.String()
+}
+
+// InfixExpression fulfills the Node and Expression interfaces.
+// It represents an infix expression like:
+// "5 + 5;"
+type InfixExpression struct {
+	Token    token.Token // The operator token, e.g. +
+	Left     Expression
+	Operator string
+	Right    Expression
+}
+
+func (ie *InfixExpression) expressionNode() {}
+
+// TokenLiteral satisfies the Node interface.
+func (ie *InfixExpression) TokenLiteral() string {
+	return ie.Token.Literal
+}
+
+// String satisfies the Node and fmt.Stringer interfaces.
+func (ie *InfixExpression) String() string {
+	var out bytes.Buffer
+
+	out.WriteString("(")
+	out.WriteString(ie.Left.String())
+	out.WriteString(" " + ie.Operator + " ")
+	out.WriteString(ie.Right.String())
+	out.WriteString(")")
+
+	return out.String()
+}
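
A quick sketch of how the relocated nodes compose, not part of this commit: it hand-builds the same tree that ast/ast_test.go builds, assuming the waiig15 import paths shown above.

package main

import (
	"fmt"

	"code.osinet.fr/fgm/waiig15/ast"
	"code.osinet.fr/fgm/waiig15/token"
)

func main() {
	// One statement: let myVar = anotherVar;
	program := &ast.Program{
		Statements: []ast.Statement{
			&ast.LetStatement{
				Token: token.Token{Type: token.LET, Literal: "let"},
				Name: &ast.Identifier{
					Token: token.Token{Type: token.IDENT, Literal: "myVar"},
					Value: "myVar",
				},
				Value: &ast.Identifier{
					Token: token.Token{Type: token.IDENT, Literal: "anotherVar"},
					Value: "anotherVar",
				},
			},
		},
	}

	// Program.String() concatenates each statement's String() output.
	fmt.Println(program.String()) // let myVar = anotherVar;
}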

+ 1 - 1
ast/ast_string_test.go → ast/ast_test.go

@@ -35,6 +35,6 @@ func TestString(t *testing.T) {
 
 	programString := program.String()
 	if programString != "let myVar = anotherVar;" {
-		t.Errorf("program.String() wrong, got=%q", programString)
+		t.Errorf("program.String() wrong. got=%q", programString)
 	}
 }

+ 0 - 29
ast/expression.go

@@ -1,29 +0,0 @@
-package ast
-
-import (
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-// ExpressionStatement fulfills the Node and Statement interfaces.
-// It represents a statement made of a bare expression like:
-//   x + 10;
-type ExpressionStatement struct {
-	Token      token.Token // the first token of the expression
-	Expression Expression
-}
-
-// String satisfies the Node and fmt.Stringer interfaces.
-func (es *ExpressionStatement) String() string {
-	if es.Expression != nil {
-		return es.Expression.String()
-	}
-
-	return ""
-}
-
-func (es *ExpressionStatement) statementNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (es *ExpressionStatement) TokenLiteral() string {
-	return es.Token.Literal
-}

+ 0 - 20
ast/identifier.go

@@ -1,20 +0,0 @@
-package ast
-
-import "code.osinet.fr/fgm/waiig15/token"
-
-// Identifier is the Node type for identifiers.
-type Identifier struct {
-	Token token.Token // the token.IDENT token. Why do we need it ?
-	Value string      // The identifier string.
-}
-
-func (i *Identifier) String() string {
-	return i.Value
-}
-
-func (i *Identifier) expressionNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (i *Identifier) TokenLiteral() string {
-	return i.Token.Literal
-}

+ 0 - 37
ast/infix_expression.go

@@ -1,37 +0,0 @@
-package ast
-
-import (
-	"bytes"
-
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-// InfixExpression fulfills the Node and Statement interfaces.
-// It represents a prefixed expression like:
-// "-5;"
-type InfixExpression struct {
-	Token    token.Token
-	Left     Expression
-	Operator string
-	Right    Expression
-}
-
-// String satisfies the Node and fmt.Stringer interfaces.
-func (ie *InfixExpression) String() string {
-	var out bytes.Buffer
-
-	out.WriteString("(")
-	out.WriteString(ie.Left.String())
-	out.WriteString(" " + ie.Operator + " ")
-	out.WriteString(ie.Right.String())
-	out.WriteString(")")
-
-	return out.String()
-}
-
-func (ie *InfixExpression) expressionNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (ie *InfixExpression) TokenLiteral() string {
-	return ie.Token.Literal
-}

+ 0 - 19
ast/int.go

@@ -1,19 +0,0 @@
-package ast
-
-import "code.osinet.fr/fgm/waiig15/token"
-
-// IntegerLiteral fulfills ast.Expression.
-type IntegerLiteral struct {
-	Token token.Token
-	Value int64
-}
-
-func (il *IntegerLiteral) expressionNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (il *IntegerLiteral) TokenLiteral() string {
-	return il.Token.Literal
-}
-func (il *IntegerLiteral) String() string {
-	return il.Token.Literal
-}

+ 0 - 37
ast/let.go

@@ -1,37 +0,0 @@
-package ast
-
-import (
-	"bytes"
-
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-// LetStatement is the Node type for Let statements.
-type LetStatement struct {
-	Token token.Token // the token.LET token. Why do we need it ?
-	Name  *Identifier
-	Value Expression
-}
-
-// String implements Node and fmt.Stringer.
-func (ls *LetStatement) String() string {
-	var out bytes.Buffer
-
-	out.WriteString(ls.TokenLiteral() + " ")
-	out.WriteString(ls.Name.String())
-	out.WriteString(" = ")
-	if ls.Value != nil {
-		out.WriteString(ls.Value.String())
-	}
-
-	out.WriteString(";")
-
-	return out.String()
-}
-
-func (ls *LetStatement) statementNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (ls *LetStatement) TokenLiteral() string {
-	return ls.Token.Literal
-}

+ 0 - 35
ast/prefix_expression.go

@@ -1,35 +0,0 @@
-package ast
-
-import (
-	"bytes"
-
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-// PrefixExpression fulfills the Node and Statement interfaces.
-// It represents a prefixed expression like:
-// "-5;"
-type PrefixExpression struct {
-	Token    token.Token
-	Operator string
-	Right    Expression
-}
-
-// String satisfies the Node and fmt.Stringer interfaces.
-func (pe *PrefixExpression) String() string {
-	var out bytes.Buffer
-
-	out.WriteString("(")
-	out.WriteString(pe.Operator)
-	out.WriteString(pe.Right.String())
-	out.WriteString(")")
-
-	return out.String()
-}
-
-func (pe *PrefixExpression) expressionNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (pe *PrefixExpression) TokenLiteral() string {
-	return pe.Token.Literal
-}

+ 0 - 36
ast/return.go

@@ -1,36 +0,0 @@
-package ast
-
-import (
-	"bytes"
-
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-// ReturnStatement fulfills the Node and Statement interfaces.
-type ReturnStatement struct {
-	Token       token.Token // the token.RETURN token. Why do we need it ?
-	Name        *Identifier
-	ReturnValue Expression
-}
-
-// String satisfies the Node and fmt.Stringer interfaces.
-func (rs *ReturnStatement) String() string {
-	var out bytes.Buffer
-
-	out.WriteString(rs.TokenLiteral() + " ")
-
-	if rs.ReturnValue != nil {
-		out.WriteString(rs.ReturnValue.String())
-	}
-
-	out.WriteString(";")
-
-	return out.String()
-}
-
-func (rs *ReturnStatement) statementNode() {}
-
-// TokenLiteral satisfies the Node interface.
-func (rs *ReturnStatement) TokenLiteral() string {
-	return rs.Token.Literal
-}

+ 28 - 28
lexer/lexer.go

@@ -25,17 +25,6 @@ func New(input string) *Lexer {
 	return l
 }
 
-// Give us the next character and advance our position in the input string.
-func (l *Lexer) readChar() {
-	if l.readPosition >= len(l.input) {
-		l.ch = 0
-	} else {
-		l.ch = l.input[l.readPosition]
-	}
-	l.position = l.readPosition
-	l.readPosition++
-}
-
 // NextToken advances in the input by one token, skipping all whitespace. It
 // returns that token. In case of a lexing error it returns an ILLEGAL token.
 func (l *Lexer) NextToken() token.Token {
@@ -53,10 +42,6 @@ func (l *Lexer) NextToken() token.Token {
 		} else {
 			tok = newToken(token.ASSIGN, l.ch)
 		}
-	case '(':
-		tok = newToken(token.LPAREN, l.ch)
-	case ')':
-		tok = newToken(token.RPAREN, l.ch)
 	case '+':
 		tok = newToken(token.PLUS, l.ch)
 	case '-':
@@ -86,6 +71,10 @@ func (l *Lexer) NextToken() token.Token {
 		tok = newToken(token.LBRACE, l.ch)
 	case '}':
 		tok = newToken(token.RBRACE, l.ch)
+	case '(':
+		tok = newToken(token.LPAREN, l.ch)
+	case ')':
+		tok = newToken(token.RPAREN, l.ch)
 	case 0:
 		tok.Literal = ""
 		tok.Type = token.EOF
@@ -109,8 +98,24 @@ func (l *Lexer) NextToken() token.Token {
 	return tok
 }
 
-func newToken(tokenType token.TokenType, ch byte) token.Token {
-	return token.Token{Type: tokenType, Literal: string(ch)}
+func (l *Lexer) skipWhitespace() {
+	for l.ch == ' ' ||
+		l.ch == '\r' ||
+		l.ch == '\t' ||
+		l.ch == '\n' {
+		l.readChar()
+	}
+}
+
+// Give us the next character and advance our position in the input string.
+func (l *Lexer) readChar() {
+	if l.readPosition >= len(l.input) {
+		l.ch = 0
+	} else {
+		l.ch = l.input[l.readPosition]
+	}
+	l.position = l.readPosition
+	l.readPosition++
 }
 
 func (l *Lexer) peekChar() byte {
@@ -137,19 +142,14 @@ func (l *Lexer) readNumber() string {
 	return l.input[position:l.position]
 }
 
-func isDigit(ch byte) bool {
-	return '0' <= ch && ch <= '9'
-}
-
 func isLetter(ch byte) bool {
 	return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
 }
 
-func (l *Lexer) skipWhitespace() {
-	for l.ch == ' ' ||
-		l.ch == '\r' ||
-		l.ch == '\t' ||
-		l.ch == '\n' {
-		l.readChar()
-	}
+func isDigit(ch byte) bool {
+	return '0' <= ch && ch <= '9'
+}
+
+func newToken(tokenType token.TokenType, ch byte) token.Token {
+	return token.Token{Type: tokenType, Literal: string(ch)}
 }
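
The moves above are behavior-neutral: the lexer still emits the same token stream. A minimal driver sketch, not part of this commit, assuming the package paths used in this repository:

package main

import (
	"fmt"

	"code.osinet.fr/fgm/waiig15/lexer"
	"code.osinet.fr/fgm/waiig15/token"
)

func main() {
	l := lexer.New("let five = 5;")

	// NextToken yields one token per call until it reaches EOF.
	for tok := l.NextToken(); tok.Type != token.EOF; tok = l.NextToken() {
		fmt.Printf("%-10s %q\n", tok.Type, tok.Literal)
	}
}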

+ 10 - 8
lexer/lexer_test.go

@@ -13,7 +13,7 @@ let ten  = 10;
 
 let add = fn(x, y) {
   x + y;
-}
+};
 
 let result = add(five, ten);
 !-/*5;
@@ -64,8 +64,9 @@ $
 		{token.IDENT, "y"},
 		{token.SEMICOLON, ";"},
 		{token.RBRACE, "}"},
+		{token.SEMICOLON, ";"},
 
-		// 25
+		// 26
 		{token.LET, "let"},
 		{token.IDENT, "result"},
 		{token.ASSIGN, "="},
@@ -77,7 +78,7 @@ $
 		{token.RPAREN, ")"},
 		{token.SEMICOLON, ";"},
 
-		// 35
+		// 36
 		{token.BANG, "!"},
 		{token.MINUS, "-"},
 		{token.SLASH, "/"},
@@ -85,7 +86,7 @@ $
 		{token.INT, "5"},
 		{token.SEMICOLON, ";"},
 
-		// 41
+		// 42
 		{token.INT, "5"},
 		{token.LT, "<"},
 		{token.INT, "10"},
@@ -93,7 +94,7 @@ $
 		{token.INT, "5"},
 		{token.SEMICOLON, ";"},
 
-		// 47
+		// 48
 		{token.IF, "if"},
 		{token.LPAREN, "("},
 		{token.INT, "5"},
@@ -112,7 +113,7 @@ $
 		{token.SEMICOLON, ";"},
 		{token.RBRACE, "}"},
 
-		// 64
+		// 65
 		{token.INT, "10"},
 		{token.EQ, "=="},
 		{token.INT, "10"},
@@ -122,6 +123,7 @@ $
 		{token.INT, "9"},
 		{token.SEMICOLON, ";"},
 
+		// 73
 		{token.ILLEGAL, "$"},
 
 		{token.EOF, ""},
@@ -133,12 +135,12 @@ $
 		tok := l.NextToken()
 
 		if tok.Type != tt.expectedType {
-			t.Fatalf("tests[%d] - tokentype wrong, expected %q, got %q",
+			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
 				i, tt.expectedType, tok.Type)
 		}
 
 		if tok.Literal != tt.expectedLiteral {
-			t.Fatalf("tests[%d] - literal wrong, expected %q, got %q",
+			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
 				i, tt.expectedLiteral, tok.Literal)
 		}
 	}

+ 226 - 63
parser/parser.go

@@ -3,6 +3,8 @@ package parser
 import (
 	"fmt"
 
+	"strconv"
+
 	"code.osinet.fr/fgm/waiig15/ast"
 	"code.osinet.fr/fgm/waiig15/lexer"
 	"code.osinet.fr/fgm/waiig15/token"
@@ -20,23 +22,6 @@ const (
 	CALL        // myFunction(X)
 )
 
-// Parser implements the parsing mechanism top-level layer.
-type Parser struct {
-	errors []string
-	l      *lexer.Lexer
-
-	curToken  token.Token
-	peekToken token.Token
-
-	prefixParseFns map[token.TokenType]prefixParseFn
-	infixParseFns  map[token.TokenType]infixParseFn
-}
-
-type (
-	prefixParseFn func() ast.Expression
-	infixParseFn  func(ast.Expression) ast.Expression
-)
-
 var precedences = map[token.TokenType]int{
 	token.EQ:       EQUALS,
 	token.NOT_EQ:   EQUALS,
@@ -48,6 +33,23 @@ var precedences = map[token.TokenType]int{
 	token.ASTERISK: PRODUCT,
 }
 
+type (
+	prefixParseFn func() ast.Expression
+	infixParseFn  func(ast.Expression) ast.Expression
+)
+
+// Parser implements the parsing mechanism top-level layer.
+type Parser struct {
+	l      *lexer.Lexer
+	errors []string
+
+	curToken  token.Token
+	peekToken token.Token
+
+	prefixParseFns map[token.TokenType]prefixParseFn
+	infixParseFns  map[token.TokenType]infixParseFn
+}
+
 // New returns a new Parser instance with the first two parser tokens already
 // loaded.
 func New(l *lexer.Lexer) *Parser {
@@ -56,6 +58,12 @@ func New(l *lexer.Lexer) *Parser {
 		errors: []string{},
 	}
 
+	p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
+	p.registerPrefix(token.IDENT, p.parseIdentifier)
+	p.registerPrefix(token.INT, p.parseIntegerLiteral)
+	p.registerPrefix(token.BANG, p.parsePrefixExpression)
+	p.registerPrefix(token.MINUS, p.parsePrefixExpression)
+
 	p.infixParseFns = make(map[token.TokenType]infixParseFn)
 	for _, tok := range []token.TokenType{
 		token.ASTERISK,
@@ -70,24 +78,59 @@ func New(l *lexer.Lexer) *Parser {
 		p.registerInfix(tok, p.parseInfixExpression)
 	}
 
-	p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
-	p.registerPrefix(token.BANG, p.parsePrefixExpression)
-	p.registerPrefix(token.IDENT, p.parseIdentifier)
-	p.registerPrefix(token.INT, p.parseIntegerLiteral)
-	p.registerPrefix(token.MINUS, p.parsePrefixExpression)
-
-	// Read two tokens, so curToken and peeToken are both set.
+	// Read two tokens, so curToken and peekToken are both set.
 	p.nextToken()
 	p.nextToken()
 
 	return p
 }
 
+func (p *Parser) nextToken() {
+	p.curToken = p.peekToken
+	p.peekToken = p.l.NextToken()
+}
+
+// Is the current token in the parser of the given type?
+func (p *Parser) curTokenIs(t token.TokenType) bool {
+	return p.curToken.Type == t
+}
+
+// Is the next token in the parser of the given type? Don't consume it.
+func (p *Parser) peekTokenIs(t token.TokenType) bool {
+	return p.peekToken.Type == t
+}
+
+// Is the next token in the parser of the given type? If it is, consume it,
+// else don't.
+func (p *Parser) expectPeek(t token.TokenType) bool {
+	if p.peekTokenIs(t) {
+		p.nextToken()
+		return true
+	}
+
+	p.peekError(t)
+	return false
+}
+
 // Errors is a getter for Parser.errors.
 func (p *Parser) Errors() []string {
 	return p.errors
 }
 
+// Log a mismatch error on the peek token type in the parser instance.
+//
+//   - t is the type of token that was expected
+func (p *Parser) peekError(t token.TokenType) {
+	msg := fmt.Sprintf("expected next token to be %s, got %s instead",
+		t, p.peekToken.Type)
+	p.errors = append(p.errors, msg)
+}
+
+func (p *Parser) noPrefixParseFnError(t token.TokenType) {
+	msg := fmt.Sprintf("no prefix parse function for %s found", t)
+	p.errors = append(p.errors, msg)
+}
+
 // ParseProgram is the outermost parsing logic, accumulating statements in a
 // Program instance and returning that instance once parsing is done.
 func (p *Parser) ParseProgram() *ast.Program {
@@ -106,57 +149,105 @@ func (p *Parser) ParseProgram() *ast.Program {
 	return program
 }
 
-// Return the precedence for the current token without advancing.
-func (p *Parser) curPrecedence() int {
-	if precedence, ok := precedences[p.curToken.Type]; ok {
-		return precedence
+func (p *Parser) parseStatement() ast.Statement {
+	defer untrace(trace("parseStatement"))
+	switch p.curToken.Type {
+	case token.LET:
+		return p.parseLetStatement()
+	case token.RETURN:
+		return p.parseReturnStatement()
+	default:
+		return p.parseExpressionStatement()
 	}
+}
 
-	return LOWEST
+func (p *Parser) parseLetStatement() *ast.LetStatement {
+	defer untrace(trace("parseLetStatement"))
+	stmt := &ast.LetStatement{
+		Token: p.curToken,
+	}
 
-}
+	// Let statement starts with an IDENT token, so if next token is not an
+	// IDENT, the next statement cannot be a Let statement.
+	if !p.expectPeek(token.IDENT) {
+		return nil
+	}
 
-// Is the current token in the parser of the given type ?
-func (p *Parser) curTokenIs(t token.TokenType) bool {
-	return p.curToken.Type == t
-}
+	stmt.Name = &ast.Identifier{
+		Token: p.curToken,
+		Value: p.curToken.Literal,
+	}
 
-// Is the next token in the parser of the given type ? If it is, consume it,
-// else don't.
-func (p *Parser) expectPeek(t token.TokenType) bool {
-	if p.peekTokenIs(t) {
+	// The previous expectPeek() call fetched the next token, so we should now
+	// be on the assignment.
+	if !p.expectPeek(token.ASSIGN) {
+		return nil
+	}
+
+	// Skip the expression for now, progress to the semicolon terminating the
+	// statement.
+	for !p.curTokenIs(token.SEMICOLON) {
 		p.nextToken()
-		return true
 	}
 
-	p.peekError(t)
-	return false
+	return stmt
 }
 
-func (p *Parser) nextToken() {
-	p.curToken = p.peekToken
-	p.peekToken = p.l.NextToken()
+func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
+	defer untrace(trace("parseReturnStatement"))
+	stmt := &ast.ReturnStatement{
+		Token: p.curToken,
+	}
+
+	// There should be an expression to consume here.
+	p.nextToken()
+
+	// Skip the expression for now, progress to the semicolon terminating the
+	// statement.
+	for !p.curTokenIs(token.SEMICOLON) {
+		p.nextToken()
+	}
+
+	return stmt
 }
 
-func (p *Parser) parseStatement() ast.Statement {
-	defer untrace(trace("parseStatement"))
-	switch p.curToken.Type {
-	case token.LET:
-		return p.parseLetStatement()
-	case token.RETURN:
-		return p.parseReturnStatement()
-	default:
-		return p.parseExpressionStatement()
+func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
+	defer untrace(trace("parseExpressionStatement"))
+	stmt := &ast.ExpressionStatement{
+		Token: p.curToken,
+	}
+
+	stmt.Expression = p.parseExpression(LOWEST)
+
+	// Semicolons are optional to help use REPL input.
+	if p.peekTokenIs(token.SEMICOLON) {
+		p.nextToken()
 	}
+
+	return stmt
 }
 
-// Log a mismatch error on the peek token type in the parser instance.
-//
-//   - t is the type of token that was expected
-func (p *Parser) peekError(t token.TokenType) {
-	msg := fmt.Sprintf("expected next token to be %s, got %s instead",
-		t, p.peekToken.Type)
-	p.errors = append(p.errors, msg)
+func (p *Parser) parseExpression(precedence int) ast.Expression {
+	defer untrace(trace("parseExpression"))
+	prefix := p.prefixParseFns[p.curToken.Type]
+	if prefix == nil {
+		p.noPrefixParseFnError(p.curToken.Type)
+		return nil
+	}
+	leftExp := prefix()
+
+	for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
+		infix := p.infixParseFns[p.peekToken.Type]
+		if infix == nil {
+			return leftExp
+		}
+
+		p.nextToken()
+
+		leftExp = infix(leftExp)
+	}
+
+	return leftExp
 }
 
 // Look forward for the precedence of the next token without advancing.
@@ -168,7 +259,79 @@ func (p *Parser) peekPrecedence() int {
 	return LOWEST
 }
 
-// Is the next token in the parser of the given type ? Don't consume it.
-func (p *Parser) peekTokenIs(t token.TokenType) bool {
-	return p.peekToken.Type == t
+// Return the precedence for the current token without advancing.
+func (p *Parser) curPrecedence() int {
+	if precedence, ok := precedences[p.curToken.Type]; ok {
+		return precedence
+	}
+
+	return LOWEST
+}
+
+// parseIdentifier does not advance the tokens or call nextToken, and this is
+// important.
+func (p *Parser) parseIdentifier() ast.Expression {
+	return &ast.Identifier{
+		Token: p.curToken,
+		Value: p.curToken.Literal,
+	}
+}
+
+func (p *Parser) parseIntegerLiteral() ast.Expression {
+	defer untrace(trace("parseIntegerLiteral"))
+	lit := &ast.IntegerLiteral{
+		Token: p.curToken,
+	}
+
+	// Base 0 allows straight interpretation of octal 0755 or hex 0xABCD.
+	value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
+	if err != nil {
+		msg := fmt.Sprintf("could not parse %q as integer",
+			p.curToken.Literal)
+		p.errors = append(p.errors, msg)
+		return nil
+	}
+
+	lit.Value = value
+
+	return lit
+}
+
+func (p *Parser) parsePrefixExpression() ast.Expression {
+	defer untrace(trace("parsePrefixExpression"))
+	expression := &ast.PrefixExpression{
+		Token:    p.curToken,
+		Operator: p.curToken.Literal,
+	}
+
+	// Consume the operator token to progress to the prefixed expression.
+	p.nextToken()
+
+	// The precedence is now that of the prefix operator instead of the lowest.
+	expression.Right = p.parseExpression(PREFIX)
+
+	return expression
+}
+
+func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
+	defer untrace(trace("parseInfixExpression"))
+	expression := &ast.InfixExpression{
+		Token:    p.curToken,
+		Operator: p.curToken.Literal,
+		Left:     left,
+	}
+
+	precedence := p.curPrecedence()
+	p.nextToken()
+	expression.Right = p.parseExpression(precedence)
+
+	return expression
+}
+
+func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
+	p.prefixParseFns[tokenType] = fn
+}
+
+func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
+	p.infixParseFns[tokenType] = fn
 }
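
The consolidated file keeps the Pratt core intact: parseExpression starts from a prefix function, then folds in infix functions while the next operator binds tighter than the current precedence. A sketch of the visible effect, not part of this commit:

package main

import (
	"fmt"

	"code.osinet.fr/fgm/waiig15/lexer"
	"code.osinet.fr/fgm/waiig15/parser"
)

func main() {
	p := parser.New(lexer.New("1 + 2 * 3;"))

	program := p.ParseProgram()
	for _, msg := range p.Errors() {
		fmt.Println("parser error:", msg)
	}

	// * has PRODUCT precedence and + only SUM, so the right operand of +
	// is the whole product, as the parenthesized output shows.
	fmt.Println(program.String()) // (1 + (2 * 3))
}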

+ 0 - 52
parser/parser_expression.go

@@ -1,52 +0,0 @@
-package parser
-
-import (
-	"fmt"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-func (p *Parser) noPrefixParseFnError(t token.TokenType) {
-	msg := fmt.Sprintf("no prefix parse function for %s found", t)
-	p.errors = append(p.errors, msg)
-}
-
-func (p *Parser) parseExpression(precedence int) ast.Expression {
-	defer untrace(trace("parseExpression"))
-	prefix := p.prefixParseFns[p.curToken.Type]
-	if prefix == nil {
-		p.noPrefixParseFnError(p.curToken.Type)
-		return nil
-	}
-
-	leftExp := prefix()
-
-	for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
-		infix := p.infixParseFns[p.peekToken.Type]
-		if infix == nil {
-			return leftExp
-		}
-
-		p.nextToken()
-		leftExp = infix(leftExp)
-	}
-
-	return leftExp
-}
-
-func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
-	defer untrace(trace("parseExpressionStatement"))
-	stmt := &ast.ExpressionStatement{
-		Token: p.curToken,
-	}
-
-	stmt.Expression = p.parseExpression(LOWEST)
-
-	// Semicolons are optional to help use REPL input.
-	if p.peekTokenIs(token.SEMICOLON) {
-		p.nextToken()
-	}
-
-	return stmt
-}

+ 0 - 12
parser/parser_identifier.go

@@ -1,12 +0,0 @@
-package parser
-
-import "code.osinet.fr/fgm/waiig15/ast"
-
-// parseIdentifier does not advance the tokens or call nextToken, and this is
-// important.
-func (p *Parser) parseIdentifier() ast.Expression {
-	return &ast.Identifier{
-		Token: p.curToken,
-		Value: p.curToken.Literal,
-	}
-}

+ 0 - 20
parser/parser_infix.go

@@ -1,20 +0,0 @@
-package parser
-
-import (
-	"code.osinet.fr/fgm/waiig15/ast"
-)
-
-func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
-	defer untrace(trace("parseInfixExpression"))
-	expression := &ast.InfixExpression{
-		Token:    p.curToken,
-		Operator: p.curToken.Literal,
-		Left:     left,
-	}
-
-	precedence := p.curPrecedence()
-	p.nextToken()
-	expression.Right = p.parseExpression(precedence)
-
-	return expression
-}

+ 0 - 64
parser/parser_infix_test.go

@@ -1,64 +0,0 @@
-package parser
-
-import (
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func TestParsingInfixExpressions(t *testing.T) {
-	infixTests := []struct {
-		input      string
-		leftValue  int64
-		operator   string
-		rightValue int64
-	}{
-		{"5 + 5", 5, "+", 5},
-		{"5 - 5", 5, "-", 5},
-		{"5 * 5", 5, "*", 5},
-		{"5 / 5", 5, "/", 5},
-		{"5 > 5", 5, ">", 5},
-		{"5 < 5", 5, "<", 5},
-		{"5 == 5", 5, "==", 5},
-		{"5 != 5", 5, "!=", 5},
-	}
-
-	for _, tt := range infixTests {
-		l := lexer.New(tt.input)
-		p := New(l)
-		program := p.ParseProgram()
-		checkParserErrors(t, p)
-
-		if len(program.Statements) != 1 {
-			t.Fatalf("program.Statements does not contain %d statements. got=%d",
-				1, len(program.Statements))
-		}
-
-		stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
-		if !ok {
-			t.Fatalf("program.Statements[0] is not expressionStatement. got=%T",
-				program.Statements[0])
-		}
-
-		exp, ok := stmt.Expression.(*ast.InfixExpression)
-		if !ok {
-			t.Fatalf("exp is not infixExpression. got=%T", stmt.Expression)
-		}
-
-		// Why no error ?
-		if !testIntegerLiteral(t, exp.Left, tt.leftValue) {
-			return
-		}
-
-		if exp.Operator != tt.operator {
-			t.Fatalf("exp.Operator is not '%s'. got=%s", tt.operator,
-				exp.Operator)
-		}
-
-		// Why no error ?
-		if !testIntegerLiteral(t, exp.Right, tt.rightValue) {
-			return
-		}
-	}
-}

+ 0 - 27
parser/parser_int.go

@@ -1,27 +0,0 @@
-package parser
-
-import (
-	"fmt"
-	"strconv"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-)
-
-func (p *Parser) parseIntegerLiteral() ast.Expression {
-	defer untrace(trace("parseIntegerLiteral"))
-	lit := &ast.IntegerLiteral{
-		Token: p.curToken,
-	}
-
-	// Base 0 allows straight interpretation of octal 0755 or hex 0xABCD.
-	value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
-	if err != nil {
-		msg := fmt.Sprintf("could not parse %q as integer",
-			p.curToken.Literal)
-		p.errors = append(p.errors, msg)
-		return nil
-	}
-
-	lit.Value = value
-	return lit
-}

+ 0 - 40
parser/parser_int_test.go

@@ -1,40 +0,0 @@
-package parser
-
-import (
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func TestIntegerLiteralExpression(t *testing.T) {
-	input := "5;"
-
-	l := lexer.New(input)
-	p := New(l)
-
-	program := p.ParseProgram()
-	checkParserErrors(t, p)
-
-	if len(program.Statements) != 1 {
-		t.Fatalf("program does not have 1 statement. got=%d",
-			len(program.Statements))
-	}
-
-	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
-	if !ok {
-		t.Fatalf("program.Statements[0] is not *ast.ExpressionStatement. got=%T",
-			program.Statements[0])
-	}
-
-	literal, ok := stmt.Expression.(*ast.IntegerLiteral)
-	if !ok {
-		t.Fatalf("expression not *ast.IntegerLiteral. got=%T",
-			stmt.Expression)
-	}
-
-	if literal.Value != 5 {
-		t.Errorf("literal.Value not %s. got=%s", "5",
-			literal.TokenLiteral())
-	}
-}

+ 0 - 38
parser/parser_let.go

@@ -1,38 +0,0 @@
-package parser
-
-import (
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-func (p *Parser) parseLetStatement() *ast.LetStatement {
-	defer untrace(trace("parseLetStatement"))
-	stmt := &ast.LetStatement{
-		Token: p.curToken,
-	}
-
-	// Let statement starts with an IDENT token, so if next token is not an
-	// IDENT, the next statement cannot be a Let statement.
-	if !p.expectPeek(token.IDENT) {
-		return nil
-	}
-
-	stmt.Name = &ast.Identifier{
-		Token: p.curToken,
-		Value: p.curToken.Literal,
-	}
-
-	// The previous expectPeek() call fetched the next token, so we should now
-	// be on the assignment.
-	if !p.expectPeek(token.ASSIGN) {
-		return nil
-	}
-
-	// Skip the expression for now, progress to the semicolon terminating the
-	// statement.
-	for !p.curTokenIs(token.SEMICOLON) {
-		p.nextToken()
-	}
-
-	return stmt
-}

+ 0 - 73
parser/parser_let_test.go

@@ -1,73 +0,0 @@
-package parser
-
-import (
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func TestLetStatements(t *testing.T) {
-	// Try removing the ident, the =, or both, to get human-readable errors.
-	input := `
-let x = 5;
-let y = 10;
-let foobar = 838383;
-`
-	l := lexer.New(input)
-	p := New(l)
-
-	program := p.ParseProgram()
-	checkParserErrors(t, p)
-	if program == nil {
-		t.Fatalf("ParseProgram() returned nil.")
-	}
-
-	if len(program.Statements) != 3 {
-		t.Fatalf("program.Statements does not contain 3 statements, got=%d",
-			len(program.Statements))
-	}
-
-	tests := []struct {
-		expectedIdentifier string
-	}{
-		{"x"},
-		{"y"},
-		{"foobar"},
-	}
-
-	for i, tt := range tests {
-		stmt := program.Statements[i]
-		if !testLetStatement(t, stmt, tt.expectedIdentifier) {
-			return
-		}
-	}
-}
-
-func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
-	if s.TokenLiteral() != "let" {
-		t.Errorf("s.TokenLiteral not 'let', got=%q", s.TokenLiteral())
-		return false
-	}
-
-	// Statement is an interface, we need a concrete type for the value, and we
-	// just determined this looked like a LetStatement.
-	letStmt, ok := s.(*ast.LetStatement)
-	if !ok {
-		t.Errorf("s not *ast.LetStatement{}, got=%T", s)
-	}
-
-	if letStmt.Name.Value != name {
-		t.Errorf("letStmt.Name.Value not %s, got=%s",
-			name, letStmt.Name.Value)
-		return false
-	}
-
-	if letStmt.Name.TokenLiteral() != name {
-		t.Errorf("letStmt.Name.TokenLiteral not %s, got=%s",
-			name, letStmt.Name.TokenLiteral())
-		return false
-	}
-
-	return true
-}

+ 0 - 21
parser/parser_prefix.go

@@ -1,21 +0,0 @@
-package parser
-
-import (
-	"code.osinet.fr/fgm/waiig15/ast"
-)
-
-func (p *Parser) parsePrefixExpression() ast.Expression {
-	defer untrace(trace("parsePrefixExpression"))
-	expression := &ast.PrefixExpression{
-		Token:    p.curToken,
-		Operator: p.curToken.Literal,
-	}
-
-	// Consume the operator token to progress to the prefixed expression.
-	p.nextToken()
-
-	// The precedence is now that of the prefix operator instead of the lowest.
-	expression.Right = p.parseExpression(PREFIX)
-
-	return expression
-}

+ 0 - 74
parser/parser_prefix_test.go

@@ -1,74 +0,0 @@
-package parser
-
-import (
-	"fmt"
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func testIntegerLiteral(t *testing.T, il ast.Expression, value int64) bool {
-	integ, ok := il.(*ast.IntegerLiteral)
-	if !ok {
-		t.Errorf("il not *ast.IntegerLiteral. got=%T", il)
-		return false
-	}
-
-	if integ.Value != value {
-		t.Errorf("integ.Value not %d. got =%d", value, integ.Value)
-		return false
-	}
-
-	if integ.TokenLiteral() != fmt.Sprintf("%d", value) {
-		t.Errorf("integ.TokenLiteral not %d. got=%s",
-			value, integ.TokenLiteral())
-		return false
-	}
-
-	return true
-}
-
-func TestParsingPrefixExpressions(t *testing.T) {
-	prefixTests := []struct {
-		input        string
-		operator     string
-		integerValue int64
-	}{
-		{"!5", "!", 5},
-		{"-15", "-", 15},
-	}
-
-	for _, tt := range prefixTests {
-		l := lexer.New(tt.input)
-		p := New(l)
-		program := p.ParseProgram()
-		checkParserErrors(t, p)
-
-		if len(program.Statements) != 1 {
-			t.Fatalf("program.Statements does not contain %d statements, got=%d\n",
-				1, len(program.Statements))
-		}
-
-		stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
-		if !ok {
-			t.Fatalf("program.STatements[0] is not ast.ExpressionStatement. got=%T",
-				program.Statements[0])
-		}
-
-		exp, ok := stmt.Expression.(*ast.PrefixExpression)
-		if !ok {
-			t.Fatalf("stms is not ast.PrefixExpression. got=%T",
-				stmt.Expression)
-		}
-
-		if exp.Operator != tt.operator {
-			t.Fatalf("exp.Operator is not '%s'. got=%s",
-				tt.operator, exp.Operator)
-		}
-
-		if !testIntegerLiteral(t, exp.Right, tt.integerValue) {
-			return
-		}
-	}
-}

+ 0 - 32
parser/parser_return.go

@@ -1,32 +0,0 @@
-package parser
-
-import (
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/token"
-)
-
-func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
-	defer untrace(trace("parseReturnStatement"))
-	stmt := &ast.ReturnStatement{
-		Token: p.curToken,
-	}
-
-	// There should be an expression to consume here.
-	p.nextToken()
-
-	// Skip the expression for now, progress to the semicolon terminating the
-	// statement.
-	for !p.curTokenIs(token.SEMICOLON) {
-		p.nextToken()
-	}
-
-	return stmt
-}
-
-func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
-	p.infixParseFns[tokenType] = fn
-}
-
-func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
-	p.prefixParseFns[tokenType] = fn
-}

+ 0 - 44
parser/parser_return_test.go

@@ -1,44 +0,0 @@
-package parser
-
-import (
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func TestReturnStatements(t *testing.T) {
-	// Try removing the ident, the =, or both, to get human-readable errors.
-	input := `
-return 5;
-return 10;
-return 993322;
-`
-	l := lexer.New(input)
-	p := New(l)
-
-	program := p.ParseProgram()
-	checkParserErrors(t, p)
-	if program == nil {
-		t.Fatalf("ParseProgram() returned nil.")
-	}
-
-	if len(program.Statements) != 3 {
-		t.Fatalf("program.Statements does not contain 3 statements, got=%d",
-			len(program.Statements))
-	}
-
-	for _, stmt := range program.Statements {
-		// Statement is an interface, we need a concrete type for the value, and
-		// our test input only contains Let statements.
-		returnStmt, ok := stmt.(*ast.ReturnStatement)
-		if !ok {
-			t.Errorf("s not *ast.ReturnStatement{}, got=%T", stmt)
-			continue
-		}
-		if returnStmt.TokenLiteral() != "return" {
-			t.Errorf("s.TokenLiteral not 'return', got=%q",
-				stmt.TokenLiteral())
-		}
-	}
-}

+ 366 - 8
parser/parser_test.go

@@ -3,20 +3,249 @@ package parser
 import (
 	"testing"
 
+	"fmt"
+
+	"code.osinet.fr/fgm/waiig15/ast"
 	"code.osinet.fr/fgm/waiig15/lexer"
 )
 
-func checkParserErrors(t *testing.T, p *Parser) {
-	errors := p.Errors()
-	if len(errors) == 0 {
-		return
+func TestLetStatements(t *testing.T) {
+	// Try removing the ident, the =, or both, to get human-readable errors.
+	input := `
+let x = 5;
+let y = 10;
+let foobar = 838383;
+`
+	l := lexer.New(input)
+	p := New(l)
+
+	program := p.ParseProgram()
+	checkParserErrors(t, p)
+	if program == nil {
+		t.Fatalf("ParseProgram() returned nil.")
 	}
 
-	t.Errorf("parser has %d errors", len(errors))
-	for _, msg := range errors {
-		t.Errorf("parser error: %q", msg)
+	if len(program.Statements) != 3 {
+		t.Fatalf("program.Statements does not contain 3 statements, got=%d",
+			len(program.Statements))
+	}
+
+	tests := []struct {
+		expectedIdentifier string
+	}{
+		{"x"},
+		{"y"},
+		{"foobar"},
+	}
+
+	for i, tt := range tests {
+		stmt := program.Statements[i]
+		if !testLetStatement(t, stmt, tt.expectedIdentifier) {
+			return
+		}
+	}
+}
+
+func TestReturnStatements(t *testing.T) {
+	// As with TestLetStatements, try mangling the input to get human-readable errors.
+	input := `
+return 5;
+return 10;
+return 993322;
+`
+	l := lexer.New(input)
+	p := New(l)
+
+	program := p.ParseProgram()
+	checkParserErrors(t, p)
+	if program == nil {
+		t.Fatalf("ParseProgram() returned nil.")
+	}
+
+	if len(program.Statements) != 3 {
+		t.Fatalf("program.Statements does not contain 3 statements, got=%d",
+			len(program.Statements))
+	}
+
+	for _, stmt := range program.Statements {
+		// Statement is an interface; we need a concrete type for the value, and
+		// our test input only contains Return statements.
+		returnStmt, ok := stmt.(*ast.ReturnStatement)
+		if !ok {
+			t.Errorf("s not *ast.ReturnStatement{}, got=%T", stmt)
+			continue
+		}
+		if returnStmt.TokenLiteral() != "return" {
+			t.Errorf("s.TokenLiteral not 'return', got=%q",
+				stmt.TokenLiteral())
+		}
+	}
+}
+
+func TestIdentifierExpression(t *testing.T) {
+	const input = "foobar"
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParserErrors(t, p)
+
+	if len(program.Statements) != 1 {
+		t.Fatalf("program has not enough statements. got=%d",
+			len(program.Statements))
+	}
+
+	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
+			program.Statements[0])
+	}
+
+	ident, ok := stmt.Expression.(*ast.Identifier)
+	if !ok {
+		t.Fatalf("exp not *ast.Identifier. got=%T", stmt.Expression)
+	}
+
+	// Why not use input instead of inline strings?
+	if ident.Value != input {
+		t.Errorf("ident.Value not %s. got=%s", input,
+			ident.Value)
+	}
+	if ident.TokenLiteral() != input {
+		t.Errorf("ident.TokenLiteral not %s. got=%s", input,
+			ident.TokenLiteral())
+	}
+}
+
+func TestIntegerLiteralExpression(t *testing.T) {
+	input := "5;"
+
+	l := lexer.New(input)
+	p := New(l)
+	program := p.ParseProgram()
+	checkParserErrors(t, p)
+
+	if len(program.Statements) != 1 {
+		t.Fatalf("program does not have 1 statement. got=%d",
+			len(program.Statements))
+	}
+	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+	if !ok {
+		t.Fatalf("program.Statements[0] is not *ast.ExpressionStatement. got=%T",
+			program.Statements[0])
+	}
+
+	literal, ok := stmt.Expression.(*ast.IntegerLiteral)
+	if !ok {
+		t.Fatalf("exp not *ast.IntegerLiteral. got=%T", stmt.Expression)
+	}
+	if literal.Value != 5 {
+		t.Errorf("literal.Value not %d. got=%d", 5, literal.Value)
+	}
+	if literal.TokenLiteral() != "5" {
+		t.Errorf("literal.TokenLiteral not %s. got=%s", "5",
+			literal.TokenLiteral())
+	}
+}
+
+func TestParsingPrefixExpressions(t *testing.T) {
+	prefixTests := []struct {
+		input        string
+		operator     string
+		integerValue int64
+	}{
+		{"!5", "!", 5},
+		{"-15", "-", 15},
+	}
+
+	for _, tt := range prefixTests {
+		l := lexer.New(tt.input)
+		p := New(l)
+		program := p.ParseProgram()
+		checkParserErrors(t, p)
+
+		if len(program.Statements) != 1 {
+			t.Fatalf("program.Statements does not contain %d statements, got=%d\n",
+				1, len(program.Statements))
+		}
+
+		stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+		if !ok {
+			t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
+				program.Statements[0])
+		}
+
+		exp, ok := stmt.Expression.(*ast.PrefixExpression)
+		if !ok {
+			t.Fatalf("stmt is not ast.PrefixExpression. got=%T",
+				stmt.Expression)
+		}
+
+		if exp.Operator != tt.operator {
+			t.Fatalf("exp.Operator is not '%s'. got=%s",
+				tt.operator, exp.Operator)
+		}
+
+		if !testIntegerLiteral(t, exp.Right, tt.integerValue) {
+			return
+		}
+	}
+}
+
+func TestParsingInfixExpressions(t *testing.T) {
+	infixTests := []struct {
+		input      string
+		leftValue  int64
+		operator   string
+		rightValue int64
+	}{
+		{"5 + 5;", 5, "+", 5},
+		{"5 - 5;", 5, "-", 5},
+		{"5 * 5;", 5, "*", 5},
+		{"5 / 5;", 5, "/", 5},
+		{"5 > 5;", 5, ">", 5},
+		{"5 < 5;", 5, "<", 5},
+		{"5 == 5;", 5, "==", 5},
+		{"5 != 5;", 5, "!=", 5},
+	}
+
+	for _, tt := range infixTests {
+		l := lexer.New(tt.input)
+		p := New(l)
+		program := p.ParseProgram()
+		checkParserErrors(t, p)
+
+		if len(program.Statements) != 1 {
+			t.Fatalf("program.Statements does not contain %d statements. got=%d",
+				1, len(program.Statements))
+		}
+
+		stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
+		if !ok {
+			t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. got=%T",
+				program.Statements[0])
+		}
+
+		exp, ok := stmt.Expression.(*ast.InfixExpression)
+		if !ok {
+			t.Fatalf("exp is not ast.InfixExpression. got=%T", stmt.Expression)
+		}
+
+		// Why no error?
+		if !testIntegerLiteral(t, exp.Left, tt.leftValue) {
+			return
+		}
+
+		if exp.Operator != tt.operator {
+			t.Fatalf("exp.Operator is not '%s'. got=%s", tt.operator,
+				exp.Operator)
+		}
+
+		// Why no error?
+		if !testIntegerLiteral(t, exp.Right, tt.rightValue) {
+			return
+		}
 	}
-	t.FailNow()
 }
 
 func TestOperatorPrecedenceParsing(t *testing.T) {
@@ -86,3 +315,132 @@ func TestOperatorPrecedenceParsing(t *testing.T) {
 		}
 	}
 }
+
+func testLetStatement(t *testing.T, s ast.Statement, name string) bool {
+	if s.TokenLiteral() != "let" {
+		t.Errorf("s.TokenLiteral not 'let'. got=%q", s.TokenLiteral())
+		return false
+	}
+
+	// Statement is an interface; we need a concrete type for the value, and we
+	// just determined this looked like a LetStatement.
+	letStmt, ok := s.(*ast.LetStatement)
+	if !ok {
+		t.Errorf("s not *ast.LetStatement. got=%T", s)
+		return false
+	}
+
+	if letStmt.Name.Value != name {
+		t.Errorf("letStmt.Name.Value not '%s'. got=%s", name, letStmt.Name.Value)
+		return false
+	}
+
+	if letStmt.Name.TokenLiteral() != name {
+		t.Errorf("letStmt.Name.TokenLiteral() not '%s'. got=%s",
+			name, letStmt.Name.TokenLiteral())
+		return false
+	}
+
+	return true
+}
+
+func testInfixExpression(
+	t *testing.T,
+	exp ast.Expression,
+	left interface{},
+	operator string,
+	right interface{},
+) bool {
+	opExp, ok := exp.(*ast.InfixExpression)
+	if !ok {
+		t.Errorf("exp is not ast.InfixExpression. got=%T(%s)", exp, exp)
+		return false
+	}
+
+	if !testLiteralExpression(t, opExp.Left, left) {
+		return false
+	}
+
+	if opExp.Operator != operator {
+		t.Errorf("exp.Operator is not '%s'. got=%q", operator, opExp.Operator)
+		return false
+	}
+
+	if !testLiteralExpression(t, opExp.Right, right) {
+		return false
+	}
+
+	return true
+}
+
+func testLiteralExpression(
+	t *testing.T,
+	exp ast.Expression,
+	expected interface{},
+) bool {
+	switch v := expected.(type) {
+	case int:
+		return testIntegerLiteral(t, exp, int64(v))
+	case int64:
+		return testIntegerLiteral(t, exp, v)
+	case string:
+		return testIdentifier(t, exp, v)
+	}
+	t.Errorf("type of exp not handled. got=%T", exp)
+	return false
+}
+
+func testIntegerLiteral(t *testing.T, il ast.Expression, value int64) bool {
+	integ, ok := il.(*ast.IntegerLiteral)
+	if !ok {
+		t.Errorf("il not *ast.IntegerLiteral. got=%T", il)
+		return false
+	}
+
+	if integ.Value != value {
+		t.Errorf("integ.Value not %d. got=%d", value, integ.Value)
+		return false
+	}
+
+	if integ.TokenLiteral() != fmt.Sprintf("%d", value) {
+		t.Errorf("integ.TokenLiteral not %d. got=%s", value,
+			integ.TokenLiteral())
+		return false
+	}
+
+	return true
+}
+
+func testIdentifier(t *testing.T, exp ast.Expression, value string) bool {
+	ident, ok := exp.(*ast.Identifier)
+	if !ok {
+		t.Errorf("exp not *ast.Identifier. got=%T", exp)
+		return false
+	}
+
+	if ident.Value != value {
+		t.Errorf("ident.Value not %s. got=%s", value, ident.Value)
+		return false
+	}
+
+	if ident.TokenLiteral() != value {
+		t.Errorf("ident.TokenLiteral not %s. got=%s", value,
+			ident.TokenLiteral())
+		return false
+	}
+
+	return true
+}
+
+func checkParserErrors(t *testing.T, p *Parser) {
+	errors := p.Errors()
+	if len(errors) == 0 {
+		return
+	}
+
+	t.Errorf("parser has %d errors", len(errors))
+	for _, msg := range errors {
+		t.Errorf("parser error: %q", msg)
+	}
+	t.FailNow()
+}

+ 7 - 7
parser/parser_tracing.go

@@ -5,24 +5,24 @@ import (
 	"strings"
 )
 
+var traceLevel = 0
+
 const traceIndentPlaceholder = "\t"
 
-var traceLevel = 0
+func indentLevel() string {
+	return strings.Repeat(traceIndentPlaceholder, traceLevel-1)
+}
 
 func tracePrint(fs string) {
 	fmt.Printf("%s%s\n", indentLevel(), fs)
 }
 
-func indentDec() {
-	traceLevel--
-}
-
 func indentInc() {
 	traceLevel++
 }
 
-func indentLevel() string {
-	return strings.Repeat(traceIndentPlaceholder, traceLevel-1)
+func indentDec() {
+	traceLevel--
 }
 
 func trace(msg string) string {

+ 0 - 43
parser/parset_identifier_test.go

@@ -1,43 +0,0 @@
-package parser
-
-import (
-	"testing"
-
-	"code.osinet.fr/fgm/waiig15/ast"
-	"code.osinet.fr/fgm/waiig15/lexer"
-)
-
-func TestIdentifierExpression(t *testing.T) {
-	const input = "foobar"
-
-	l := lexer.New(input)
-	p := New(l)
-	program := p.ParseProgram()
-	checkParserErrors(t, p)
-
-	if len(program.Statements) != 1 {
-		t.Fatalf("program has not enough statements. got=%d",
-			len(program.Statements))
-	}
-
-	stmt, ok := program.Statements[0].(*ast.ExpressionStatement)
-	if !ok {
-		t.Fatalf("program.Statements[0] is not ast.ExpressionStatement. Got=%T",
-			program.Statements)
-	}
-
-	ident, ok := stmt.Expression.(*ast.Identifier)
-	if !ok {
-		t.Fatalf("exp not *ast.Identifier. got=%T", stmt.Expression)
-	}
-
-	// Why not use input instead of inline strings ?
-	if ident.Value != input {
-		t.Errorf("ident.Value not %s. got=%s", input,
-			ident.Value)
-	}
-	if ident.TokenLiteral() != input {
-		t.Errorf("ident.TokenLiteral not %s. got=%s", input,
-			ident.TokenLiteral())
-	}
-}

+ 9 - 8
token/token.go

@@ -3,12 +3,6 @@ package token
 // TokenType is the string representation of the Token types.
 type TokenType string
 
-// Token represents a Parser token.
-type Token struct {
-	Type    TokenType
-	Literal string
-}
-
 // The TokenType values.
 const (
 	ILLEGAL = "ILLEGAL"
@@ -26,8 +20,9 @@ const (
 	ASTERISK = "*"
 	SLASH    = "/"
 
-	LT     = "<"
-	GT     = ">"
+	LT = "<"
+	GT = ">"
+
 	EQ     = "=="
 	NOT_EQ = "!="
 
@@ -50,6 +45,12 @@ const (
 	RETURN   = "RETURN"
 )
 
+// Token represents a Parser token.
+type Token struct {
+	Type    TokenType
+	Literal string
+}
+
 var keywords = map[string]TokenType{
 	"fn":     FUNCTION,
 	"let":    LET,