// parser.go — Pratt (top-down operator precedence) parser for the Monkey language.
  1. package parser
  2. import (
  3. "fmt"
  4. "strconv"
  5. "code.osinet.fr/fgm/waiig15/ast"
  6. "code.osinet.fr/fgm/waiig15/lexer"
  7. "code.osinet.fr/fgm/waiig15/token"
  8. )
  9. // Precedence constants.
  10. const (
  11. _ int = iota
  12. LOWEST
  13. EQUALS // ==
  14. LESSGREATER // > or <
  15. SUM // +
  16. PRODUCT // *
  17. PREFIX // -X or !X
  18. CALL // myFunction(X)
  19. )
  20. var precedences = map[token.TokenType]int{
  21. token.EQ: EQUALS,
  22. token.NOT_EQ: EQUALS,
  23. token.LT: LESSGREATER,
  24. token.GT: LESSGREATER,
  25. token.PLUS: SUM,
  26. token.MINUS: SUM,
  27. token.SLASH: PRODUCT,
  28. token.ASTERISK: PRODUCT,
  29. }
  30. type (
  31. prefixParseFn func() ast.Expression
  32. infixParseFn func(ast.Expression) ast.Expression
  33. )
  34. // Parser implements the parsing mechanism top-level layer.
  35. type Parser struct {
  36. l *lexer.Lexer
  37. errors []string
  38. curToken token.Token
  39. peekToken token.Token
  40. prefixParseFns map[token.TokenType]prefixParseFn
  41. infixParseFns map[token.TokenType]infixParseFn
  42. }
  43. // New returns a new Parser instance with the first two parser tokens already
  44. // loaded.
  45. func New(l *lexer.Lexer) *Parser {
  46. p := &Parser{
  47. l: l,
  48. errors: []string{},
  49. }
  50. p.prefixParseFns = make(map[token.TokenType]prefixParseFn)
  51. p.registerPrefix(token.IDENT, p.parseIdentifier)
  52. p.registerPrefix(token.INT, p.parseIntegerLiteral)
  53. p.registerPrefix(token.BANG, p.parsePrefixExpression)
  54. p.registerPrefix(token.MINUS, p.parsePrefixExpression)
  55. p.infixParseFns = make(map[token.TokenType]infixParseFn)
  56. for _, tok := range []token.TokenType{
  57. token.ASTERISK,
  58. token.EQ,
  59. token.GT,
  60. token.LT,
  61. token.MINUS,
  62. token.NOT_EQ,
  63. token.PLUS,
  64. token.SLASH,
  65. } {
  66. p.registerInfix(tok, p.parseInfixExpression)
  67. }
  68. // Read two tokens, so curToken and peekToken are both set
  69. p.nextToken()
  70. p.nextToken()
  71. return p
  72. }
  73. func (p *Parser) nextToken() {
  74. p.curToken = p.peekToken
  75. p.peekToken = p.l.NextToken()
  76. }
  77. // Is the current token in the parser of the given type ?
  78. func (p *Parser) curTokenIs(t token.TokenType) bool {
  79. return p.curToken.Type == t
  80. }
  81. // Is the next token in the parser of the given type ? Don't consume it.
  82. func (p *Parser) peekTokenIs(t token.TokenType) bool {
  83. return p.peekToken.Type == t
  84. }
  85. // Is the next token in the parser of the given type ? If it is, consume it,
  86. // else don't.
  87. func (p *Parser) expectPeek(t token.TokenType) bool {
  88. if p.peekTokenIs(t) {
  89. p.nextToken()
  90. return true
  91. }
  92. p.peekError(t)
  93. return false
  94. }
  95. // Errors is a getter for Parser.errors.
  96. func (p *Parser) Errors() []string {
  97. return p.errors
  98. }
  99. // Log a mismatch error on the peek token type in the parser instance.
  100. //
  101. // - t is the type of token that was expected
  102. func (p *Parser) peekError(t token.TokenType) {
  103. msg := fmt.Sprintf("expected next token to be %s, got %s instead",
  104. t, p.peekToken.Type)
  105. p.errors = append(p.errors, msg)
  106. }
  107. func (p *Parser) noPrefixParseFnError(t token.TokenType) {
  108. msg := fmt.Sprintf("no prefix parse function for %s found", t)
  109. p.errors = append(p.errors, msg)
  110. }
  111. // ParseProgram is the outermost parsing logic, accumulating statements in a
  112. // Program instance and returning that instance once parsing is done.
  113. func (p *Parser) ParseProgram() *ast.Program {
  114. defer untrace(trace("ParseProgram"))
  115. program := &ast.Program{
  116. Statements: []ast.Statement{},
  117. }
  118. for !p.curTokenIs(token.EOF) {
  119. stmt := p.parseStatement()
  120. if stmt != nil {
  121. program.Statements = append(program.Statements, stmt)
  122. }
  123. p.nextToken()
  124. }
  125. return program
  126. }
  127. func (p *Parser) parseStatement() ast.Statement {
  128. defer untrace(trace("parseStatement"))
  129. switch p.curToken.Type {
  130. case token.LET:
  131. return p.parseLetStatement()
  132. case token.RETURN:
  133. return p.parseReturnStatement()
  134. default:
  135. return p.parseExpressionStatement()
  136. }
  137. }
  138. func (p *Parser) parseLetStatement() *ast.LetStatement {
  139. defer untrace(trace("parseLetStatement"))
  140. stmt := &ast.LetStatement{
  141. Token: p.curToken,
  142. }
  143. // Let statement starts with an IDENT token, so if next token is not an
  144. // IDENT, the next statement cannot be a Let statement.
  145. if !p.expectPeek(token.IDENT) {
  146. return nil
  147. }
  148. stmt.Name = &ast.Identifier{
  149. Token: p.curToken,
  150. Value: p.curToken.Literal,
  151. }
  152. // The previous expectPeek() call fetched the next token, so we should now
  153. // be on the assignment.
  154. if !p.expectPeek(token.ASSIGN) {
  155. return nil
  156. }
  157. // Skip the expression for now, progress to the semicolon terminating the
  158. // statement.
  159. for !p.curTokenIs(token.SEMICOLON) {
  160. p.nextToken()
  161. }
  162. return stmt
  163. }
  164. func (p *Parser) parseReturnStatement() *ast.ReturnStatement {
  165. defer untrace(trace("parseReturnStatement"))
  166. stmt := &ast.ReturnStatement{
  167. Token: p.curToken,
  168. }
  169. // There should be an expression to consume here.
  170. p.nextToken()
  171. // Skip the expression for now, progress to the semicolon terminating the
  172. // statement.
  173. for !p.curTokenIs(token.SEMICOLON) {
  174. p.nextToken()
  175. }
  176. return stmt
  177. }
  178. func (p *Parser) parseExpressionStatement() *ast.ExpressionStatement {
  179. defer untrace(trace("parseExpressionStatement"))
  180. stmt := &ast.ExpressionStatement{
  181. Token: p.curToken,
  182. }
  183. stmt.Expression = p.parseExpression(LOWEST)
  184. // Semicolons are optional to help use REPL input.
  185. if p.peekTokenIs(token.SEMICOLON) {
  186. p.nextToken()
  187. }
  188. return stmt
  189. }
  190. func (p *Parser) parseExpression(precedence int) ast.Expression {
  191. defer untrace(trace("parseExpression"))
  192. prefix := p.prefixParseFns[p.curToken.Type]
  193. if prefix == nil {
  194. p.noPrefixParseFnError(p.curToken.Type)
  195. return nil
  196. }
  197. leftExp := prefix()
  198. for !p.peekTokenIs(token.SEMICOLON) && precedence < p.peekPrecedence() {
  199. infix := p.infixParseFns[p.peekToken.Type]
  200. if infix == nil {
  201. return leftExp
  202. }
  203. p.nextToken()
  204. leftExp = infix(leftExp)
  205. }
  206. return leftExp
  207. }
  208. // Look forward for the precedence of the next token without advancing.
  209. func (p *Parser) peekPrecedence() int {
  210. if precedence, ok := precedences[p.peekToken.Type]; ok {
  211. return precedence
  212. }
  213. return LOWEST
  214. }
  215. // Return the precedence for the current token without advancing.
  216. func (p *Parser) curPrecedence() int {
  217. if precedence, ok := precedences[p.curToken.Type]; ok {
  218. return precedence
  219. }
  220. return LOWEST
  221. }
  222. // parseIdentifier does not advance the tokens or call nextToken, and this is
  223. // important.
  224. func (p *Parser) parseIdentifier() ast.Expression {
  225. return &ast.Identifier{
  226. Token: p.curToken,
  227. Value: p.curToken.Literal,
  228. }
  229. }
  230. func (p *Parser) parseIntegerLiteral() ast.Expression {
  231. defer untrace(trace("parseIntegerLiteral"))
  232. lit := &ast.IntegerLiteral{
  233. Token: p.curToken,
  234. }
  235. // Base 0 allows straight interpretation of octal 0755 or hex 0xABCD.
  236. value, err := strconv.ParseInt(p.curToken.Literal, 0, 64)
  237. if err != nil {
  238. msg := fmt.Sprintf("could not parse %q as integer",
  239. p.curToken.Literal)
  240. p.errors = append(p.errors, msg)
  241. return nil
  242. }
  243. lit.Value = value
  244. return lit
  245. }
  246. func (p *Parser) parsePrefixExpression() ast.Expression {
  247. defer untrace(trace("parsePrefixExpression"))
  248. expression := &ast.PrefixExpression{
  249. Token: p.curToken,
  250. Operator: p.curToken.Literal,
  251. }
  252. // Consume the operator token to progress to the prefixed expression.
  253. p.nextToken()
  254. // The precedence is now that of the prefix operator instead of the lowest.
  255. expression.Right = p.parseExpression(PREFIX)
  256. return expression
  257. }
  258. func (p *Parser) parseInfixExpression(left ast.Expression) ast.Expression {
  259. defer untrace(trace("parseInfixExpression"))
  260. expression := &ast.InfixExpression{
  261. Token: p.curToken,
  262. Operator: p.curToken.Literal,
  263. Left: left,
  264. }
  265. precedence := p.curPrecedence()
  266. p.nextToken()
  267. expression.Right = p.parseExpression(precedence)
  268. return expression
  269. }
  270. func (p *Parser) registerPrefix(tokenType token.TokenType, fn prefixParseFn) {
  271. p.prefixParseFns[tokenType] = fn
  272. }
  273. func (p *Parser) registerInfix(tokenType token.TokenType, fn infixParseFn) {
  274. p.infixParseFns[tokenType] = fn
  275. }