parser.go

package parser

import (
	"fmt"

	"code.osinet.fr/fgm/waiig15/ast"
	"code.osinet.fr/fgm/waiig15/lexer"
	"code.osinet.fr/fgm/waiig15/token"
)

// Precedence constants.
const (
	_ int = iota
	LOWEST
	EQUALS      // ==
	LESSGREATER // > or <
	SUM         // +
	PRODUCT     // *
	PREFIX      // -X or !X
	CALL        // myFunction(X)
)
// Parser implements the top-level layer of the parsing mechanism.
type Parser struct {
	errors         []string
	l              *lexer.Lexer
	curToken       token.Token
	peekToken      token.Token
	prefixParseFns map[token.TokenType]prefixParseFn
	infixParseFns  map[token.TokenType]infixParseFn
}

type (
	prefixParseFn func() ast.Expression
	infixParseFn  func(ast.Expression) ast.Expression
)

var precedences = map[token.TokenType]int{
	token.EQ:       EQUALS,
	token.NOT_EQ:   EQUALS,
	token.LT:       LESSGREATER,
	token.GT:       LESSGREATER,
	token.PLUS:     SUM,
	token.MINUS:    SUM,
	token.SLASH:    PRODUCT,
	token.ASTERISK: PRODUCT,
}
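
// With these precedences, Pratt parsing groups `1 + 2 * 3` as `(1 + (2 * 3))`,
// because PRODUCT (*) binds tighter than SUM (+), while `1 * 2 + 3` groups as
// `((1 * 2) + 3)`. Token types missing from the map default to LOWEST (see
// curPrecedence and peekPrecedence below).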
// New returns a new Parser instance with the first two parser tokens already
// loaded.
func New(l *lexer.Lexer) *Parser {
	p := &Parser{
		l:      l,
		errors: []string{},
	}

	p.infixParseFns = make(map[token.TokenType]infixParseFn)
	for _, tok := range []token.TokenType{
		token.ASTERISK,
		token.EQ,
		token.GT,
		token.LT,
		token.MINUS,
		token.NOT_EQ,
		token.PLUS,
		token.SLASH,
	} {
		p.registerInfix(tok, p.parseInfixExpression)
	}

	p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
	p.registerPrefix(token.BANG, p.parsePrefixExpression)
	p.registerPrefix(token.IDENT, p.parseIdentifier)
	p.registerPrefix(token.INT, p.parseIntegerLiteral)
	p.registerPrefix(token.MINUS, p.parsePrefixExpression)

	// Read two tokens, so curToken and peekToken are both set.
	p.nextToken()
	p.nextToken()

	return p
}
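
// registerPrefix and registerInfix are called above but fall outside this
// excerpt. Assuming the conventional shape for this kind of parser, they are
// plain map writers, roughly:
//
//	func (p *Parser) registerPrefix(t token.TokenType, fn prefixParseFn) {
//		p.prefixParseFns[t] = fn
//	}
//
//	func (p *Parser) registerInfix(t token.TokenType, fn infixParseFn) {
//		p.infixParseFns[t] = fn
//	}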
// Errors is a getter for Parser.errors.
func (p *Parser) Errors() []string {
	return p.errors
}

// ParseProgram is the outermost parsing logic, accumulating statements in a
// Program instance and returning that instance once parsing is done.
func (p *Parser) ParseProgram() *ast.Program {
	defer untrace(trace("ParseProgram"))

	program := &ast.Program{
		Statements: []ast.Statement{},
	}

	for !p.curTokenIs(token.EOF) {
		stmt := p.parseStatement()
		if stmt != nil {
			program.Statements = append(program.Statements, stmt)
		}
		p.nextToken()
	}

	return program
}
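
// parseSource is not part of the original file; it is a minimal usage sketch
// showing how a caller is expected to drive the parser, assuming the lexer
// package exposes a New(input string) *lexer.Lexer constructor as used
// elsewhere in this project.
func parseSource(input string) (*ast.Program, []string) {
	l := lexer.New(input) // tokenize the source text (assumed constructor)
	p := New(l)           // prime curToken and peekToken
	program := p.ParseProgram()
	return program, p.Errors() // callers should check Errors() before using program
}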
// Return the precedence for the current token without advancing.
func (p *Parser) curPrecedence() int {
	if precedence, ok := precedences[p.curToken.Type]; ok {
		return precedence
	}

	return LOWEST
}

// Is the current token in the parser of the given type?
func (p *Parser) curTokenIs(t token.TokenType) bool {
	return p.curToken.Type == t
}

// Is the next token in the parser of the given type? If it is, consume it;
// otherwise record a peekError and leave the parser where it is.
func (p *Parser) expectPeek(t token.TokenType) bool {
	if p.peekTokenIs(t) {
		p.nextToken()
		return true
	}

	p.peekError(t)
	return false
}
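
// Typical call site (e.g. in parseLetStatement, outside this excerpt): bail
// out when the next token is not what the grammar requires:
//
//	if !p.expectPeek(token.IDENT) {
//		return nil
//	}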
// nextToken advances the parser by one token: the previous peek token becomes
// the current token, and a new peek token is read from the lexer.
func (p *Parser) nextToken() {
	p.curToken = p.peekToken
	p.peekToken = p.l.NextToken()
}

// parseStatement dispatches on the current token type to the matching
// statement parser, defaulting to an expression statement.
func (p *Parser) parseStatement() ast.Statement {
	defer untrace(trace("parseStatement"))

	switch p.curToken.Type {
	case token.LET:
		return p.parseLetStatement()
	case token.RETURN:
		return p.parseReturnStatement()
	default:
		return p.parseExpressionStatement()
	}
}
// Log a mismatch error on the peek token type in the parser instance.
//
// - t is the type of token that was expected
func (p *Parser) peekError(t token.TokenType) {
	msg := fmt.Sprintf("expected next token to be %s, got %s instead",
		t, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}

// Look ahead at the precedence of the next token without advancing.
func (p *Parser) peekPrecedence() int {
	if precedence, ok := precedences[p.peekToken.Type]; ok {
		return precedence
	}

	return LOWEST
}
// Is the next token in the parser of the given type? Unlike expectPeek, this
// does not consume it.
func (p *Parser) peekTokenIs(t token.TokenType) bool {
	return p.peekToken.Type == t
}
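
// trace and untrace, used by ParseProgram and parseStatement above, are
// defined outside this excerpt. A minimal stand-in, assuming the usual
// "print on entry, print on deferred exit" behaviour:
//
//	func trace(msg string) string { fmt.Println("BEGIN " + msg); return msg }
//	func untrace(msg string)      { fmt.Println("END " + msg) }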