lexer_test.go

package lexer

import (
	"fgm/waiig15/token"
	"testing"
)

// TestNextToken checks that the lexer turns a string of single-character
// operators and delimiters into the expected sequence of tokens.
func TestNextToken(t *testing.T) {
	input := `=+(){},;`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.ASSIGN, "="},
		{token.PLUS, "+"},
		{token.LPAREN, "("},
		{token.RPAREN, ")"},
		{token.LBRACE, "{"},
		{token.RBRACE, "}"},
		{token.COMMA, ","},
		{token.SEMICOLON, ";"},
		{token.EOF, ""},
	}

	l := New(input)

	for i, tt := range tests {
		tok := l.NextToken()

		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - tokentype wrong, expected %q, got %q",
				i, tt.expectedType, tok.Type)
		}

		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - literal wrong, expected %q, got %q",
				i, tt.expectedLiteral, tok.Literal)
		}
	}
}
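
The test depends on two pieces that live elsewhere in the repository: the token package (token.TokenType and the token constants) and the lexer's New and NextToken functions. For context, below is a minimal sketch of what those could look like at this single-character stage, following the book's approach; the exact field names and the readChar/newToken helpers are illustrative assumptions and may differ from the repository's actual files.

// token/token.go - sketch of the token definitions this test assumes.
package token

type TokenType string

type Token struct {
	Type    TokenType
	Literal string
}

const (
	ILLEGAL = "ILLEGAL"
	EOF     = "EOF"

	// Single-character operators and delimiters used by the test.
	ASSIGN    = "="
	PLUS      = "+"
	COMMA     = ","
	SEMICOLON = ";"
	LPAREN    = "("
	RPAREN    = ")"
	LBRACE    = "{"
	RBRACE    = "}"
)

// lexer/lexer.go - a minimal single-character lexer sufficient for this test.
package lexer

import "fgm/waiig15/token"

type Lexer struct {
	input        string
	position     int  // index of the current character
	readPosition int  // index of the next character to read
	ch           byte // current character under examination
}

// New returns a Lexer primed on the first character of input.
func New(input string) *Lexer {
	l := &Lexer{input: input}
	l.readChar()
	return l
}

// readChar advances the lexer by one character, using 0 to signal end of input.
func (l *Lexer) readChar() {
	if l.readPosition >= len(l.input) {
		l.ch = 0
	} else {
		l.ch = l.input[l.readPosition]
	}
	l.position = l.readPosition
	l.readPosition++
}

// NextToken maps the current character to a token and advances the input.
func (l *Lexer) NextToken() token.Token {
	var tok token.Token

	switch l.ch {
	case '=':
		tok = newToken(token.ASSIGN, l.ch)
	case '+':
		tok = newToken(token.PLUS, l.ch)
	case '(':
		tok = newToken(token.LPAREN, l.ch)
	case ')':
		tok = newToken(token.RPAREN, l.ch)
	case '{':
		tok = newToken(token.LBRACE, l.ch)
	case '}':
		tok = newToken(token.RBRACE, l.ch)
	case ',':
		tok = newToken(token.COMMA, l.ch)
	case ';':
		tok = newToken(token.SEMICOLON, l.ch)
	case 0:
		tok.Literal = ""
		tok.Type = token.EOF
	}

	l.readChar()
	return tok
}

func newToken(tokenType token.TokenType, ch byte) token.Token {
	return token.Token{Type: tokenType, Literal: string(ch)}
}

With those two files in place, `go test ./lexer` runs TestNextToken, which walks the table of expected tokens and fails fast on the first mismatch in either the token type or its literal.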