lexer_test.go

package lexer

import (
	"fgm/waiig15/token"
	"testing"
)

// TestNextToken verifies that the lexer turns a small sample program into
// the expected sequence of token types and literals, ending with EOF.
func TestNextToken(t *testing.T) {
	input := `
let five = 5;
let ten = 10;
let add = fn(x, y) {
x + y;
}
let result = add(five, ten);
`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.LET, "let"},
		{token.IDENT, "five"},
		{token.ASSIGN, "="},
		{token.INT, "5"},
		{token.SEMICOLON, ";"},
		{token.LET, "let"},
		{token.IDENT, "ten"},
		{token.ASSIGN, "="},
		{token.INT, "10"},
		{token.SEMICOLON, ";"},
		{token.LET, "let"},
		{token.IDENT, "add"},
		{token.ASSIGN, "="},
		{token.FUNCTION, "fn"},
		{token.LPAREN, "("},
		{token.IDENT, "x"},
		{token.COMMA, ","},
		{token.IDENT, "y"},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.IDENT, "x"},
		{token.PLUS, "+"},
		{token.IDENT, "y"},
		{token.SEMICOLON, ";"},
		{token.RBRACE, "}"},
		{token.LET, "let"},
		{token.IDENT, "result"},
		{token.ASSIGN, "="},
		{token.IDENT, "add"},
		{token.LPAREN, "("},
		{token.IDENT, "five"},
		{token.COMMA, ","},
		{token.IDENT, "ten"},
		{token.RPAREN, ")"},
		{token.SEMICOLON, ";"},
		{token.EOF, ""},
	}

	l := New(input)

	for i, tt := range tests {
		tok := l.NextToken()

		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - token type wrong, expected %q, got %q",
				i, tt.expectedType, tok.Type)
		}

		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - literal wrong, expected %q, got %q",
				i, tt.expectedLiteral, tok.Literal)
		}
	}
}
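
For context, the test above drives a lexer package exposing New(input string) and a NextToken() method that returns token.Token values with Type and Literal fields. The sketch below shows one way such a lexer could look for exactly the token subset exercised by the test. It is written only against the names the test itself references; the helper names (readWhile, isLetter, isDigit), the inline keyword switch, and the handling of unknown bytes are assumptions for illustration, not the implementation that ships in fgm/waiig15.

// lexer_sketch.go (hypothetical file name): a minimal lexer sufficient for
// the tokens the test above expects. Helper names and the inline keyword
// switch are illustrative choices, not taken from the repository.
package lexer

import "fgm/waiig15/token"

// Lexer scans its input one byte at a time.
type Lexer struct {
	input    string
	position int  // index of the next byte to read
	ch       byte // byte currently under examination; 0 signals end of input
}

// New returns a Lexer positioned on the first byte of input.
func New(input string) *Lexer {
	l := &Lexer{input: input}
	l.readChar()
	return l
}

// readChar advances to the next byte, using 0 as the end-of-input sentinel.
func (l *Lexer) readChar() {
	if l.position >= len(l.input) {
		l.ch = 0
	} else {
		l.ch = l.input[l.position]
	}
	l.position++
}

// readWhile consumes bytes as long as pred holds and returns them as a string.
func (l *Lexer) readWhile(pred func(byte) bool) string {
	start := l.position - 1
	for pred(l.ch) {
		l.readChar()
	}
	return l.input[start : l.position-1]
}

// NextToken skips whitespace and returns the next token, or EOF at the end.
func (l *Lexer) NextToken() token.Token {
	for l.ch == ' ' || l.ch == '\t' || l.ch == '\n' || l.ch == '\r' {
		l.readChar()
	}

	var tok token.Token
	switch l.ch {
	case '=':
		tok = token.Token{Type: token.ASSIGN, Literal: "="}
	case '+':
		tok = token.Token{Type: token.PLUS, Literal: "+"}
	case ',':
		tok = token.Token{Type: token.COMMA, Literal: ","}
	case ';':
		tok = token.Token{Type: token.SEMICOLON, Literal: ";"}
	case '(':
		tok = token.Token{Type: token.LPAREN, Literal: "("}
	case ')':
		tok = token.Token{Type: token.RPAREN, Literal: ")"}
	case '{':
		tok = token.Token{Type: token.LBRACE, Literal: "{"}
	case '}':
		tok = token.Token{Type: token.RBRACE, Literal: "}"}
	case 0:
		return token.Token{Type: token.EOF, Literal: ""}
	default:
		switch {
		case isLetter(l.ch):
			word := l.readWhile(isLetter)
			switch word { // keywords vs. plain identifiers
			case "let":
				return token.Token{Type: token.LET, Literal: word}
			case "fn":
				return token.Token{Type: token.FUNCTION, Literal: word}
			default:
				return token.Token{Type: token.IDENT, Literal: word}
			}
		case isDigit(l.ch):
			return token.Token{Type: token.INT, Literal: l.readWhile(isDigit)}
		default:
			// Bytes outside this subset are skipped in the sketch;
			// a full lexer would emit an ILLEGAL token instead.
			l.readChar()
			return l.NextToken()
		}
	}
	l.readChar()
	return tok
}

func isLetter(ch byte) bool {
	return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_'
}

func isDigit(ch byte) bool {
	return '0' <= ch && ch <= '9'
}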