A work-in-progress SQL parser written in TypeScript
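Only the lexer is exercised so far. As a minimal sketch of driving it directly (based on the `new Lexer(source).scan()` call and the `Token[] | Error` return type used in lexer.test.ts below; the import paths assume you are running from the repo root):

// Hypothetical usage sketch, not part of the repo; inferred from lexer.test.ts.
import { isError } from "./src/error";
import Lexer from "./src/lexer";

const result = new Lexer("SELECT name FROM users;").scan();

if (isError(result)) {
  // Scanning failed, e.g. "Unrecognized character ~"
  console.error(result.message);
} else {
  // Each Token carries a kind, a lexeme (or null), and a line number,
  // matching the expectations in the tests.
  for (const token of result) {
    console.log(token);
  }
}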

lexer.test.ts

/* tslint:disable:no-unused-expression */
import { expect } from "chai";
import Error, { isError } from "../src/error";
import Lexer from "../src/lexer";
import Token, { TokenKind } from "../src/token";

const scan = (source: string): Token[] | Error => {
  return new Lexer(source).scan();
};

describe("Lexer", () => {
  it("scans uppercase keywords", () => {
    const tokens = scan("SELECT FROM WHERE AS AND OR");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.SELECT, null, 1),
      new Token(TokenKind.FROM, null, 1),
      new Token(TokenKind.WHERE, null, 1),
      new Token(TokenKind.AS, null, 1),
      new Token(TokenKind.AND, null, 1),
      new Token(TokenKind.OR, null, 1),
      new Token(TokenKind.EOF, null, 1),
    ]);
  });

  it("scans lowercase keywords", () => {
    const tokens = scan("select from where as and or");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.SELECT, null, 1),
      new Token(TokenKind.FROM, null, 1),
      new Token(TokenKind.WHERE, null, 1),
      new Token(TokenKind.AS, null, 1),
      new Token(TokenKind.AND, null, 1),
      new Token(TokenKind.OR, null, 1),
      new Token(TokenKind.EOF, null, 1),
    ]);
  });

  it("scans symbols", () => {
    const tokens = scan("=,`;.+-*/()");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.EQUALS, null, 1),
      new Token(TokenKind.COMMA, null, 1),
      new Token(TokenKind.BACKTICK, null, 1),
      new Token(TokenKind.SEMICOLON, null, 1),
      new Token(TokenKind.DOT, null, 1),
      new Token(TokenKind.PLUS, null, 1),
      new Token(TokenKind.MINUS, null, 1),
      new Token(TokenKind.STAR, null, 1),
      new Token(TokenKind.SLASH, null, 1),
      new Token(TokenKind.LPAREN, null, 1),
      new Token(TokenKind.RPAREN, null, 1),
      new Token(TokenKind.EOF, null, 1),
    ]);
  });

  it("scans identifiers", () => {
    const tokens = scan("ident ident_ifier Ident");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.IDENTIFIER, "ident", 1),
      new Token(TokenKind.IDENTIFIER, "ident_ifier", 1),
      new Token(TokenKind.IDENTIFIER, "Ident", 1),
      new Token(TokenKind.EOF, null, 1),
    ]);
  });

  it("scans numbers", () => {
    const tokens = scan("1 123.45 .123");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.NUMBER, "1", 1),
      new Token(TokenKind.NUMBER, "123.45", 1),
      new Token(TokenKind.NUMBER, ".123", 1),
      new Token(TokenKind.EOF, null, 1),
    ]);
  });

  it("keeps track of line numbers", () => {
    const tokens = scan("foo\nbar");
    expect(tokens).to.deep.equal([
      new Token(TokenKind.IDENTIFIER, "foo", 1),
      new Token(TokenKind.IDENTIFIER, "bar", 2),
      new Token(TokenKind.EOF, null, 2),
    ]);
  });

  it("returns an error for an unrecognized character", () => {
    const error = scan("~");
    expect(isError(error)).to.be.ok;
    if (isError(error)) {
      expect(error.message).to.equal("Unrecognized character ~");
    }
  });
});
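For reference, the tests pin down the shape of the token API: a Token is constructed with a kind, an optional lexeme, and a line number, and TokenKind covers keywords, symbols, literals, and EOF. A minimal sketch of what src/token.ts could look like, inferred only from those call sites (the actual module in the repo may differ):

// Sketch of src/token.ts inferred from the test expectations above;
// enum members and field names are assumptions, not the repo's source.
export enum TokenKind {
  // Keywords
  SELECT, FROM, WHERE, AS, AND, OR,
  // Symbols
  EQUALS, COMMA, BACKTICK, SEMICOLON, DOT, PLUS, MINUS, STAR, SLASH, LPAREN, RPAREN,
  // Literals
  IDENTIFIER, NUMBER,
  // End of input
  EOF,
}

export default class Token {
  constructor(
    public kind: TokenKind,        // which kind of token was scanned
    public lexeme: string | null,  // source text for identifiers/numbers, null otherwise
    public line: number,           // 1-based line the token appeared on
  ) {}
}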