A templating language that looks like Lisp and compiles to HTML
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

lexer.js 2.5KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111
  1. const test = require('tape')
  2. const Lexer = require('../src/lexer')
  3. const tt = require('../src/tokenTypes')
  4. test('lexes simple template correctly', t => {
  5. t.plan(4)
  6. const lexer = new Lexer()
  7. let tokens = lexer.scan(
  8. '(div :class "foobar" (p "Lorem ipsum dolor sit amet"))',
  9. ).tokens
  10. t.deepEqual(tokens.map(token => token.type), [
  11. tt.OPAREN,
  12. tt.IDENTIFIER,
  13. tt.ATTRIBUTE,
  14. tt.QUOTE,
  15. tt.LITERAL,
  16. tt.QUOTE,
  17. tt.OPAREN,
  18. tt.IDENTIFIER,
  19. tt.QUOTE,
  20. tt.LITERAL,
  21. tt.QUOTE,
  22. tt.CPAREN,
  23. tt.CPAREN,
  24. tt.EOF,
  25. ])
  26. t.equal(tokens[1].value, 'div')
  27. t.equal(tokens[2].value, 'class')
  28. t.equal(tokens[9].value, 'Lorem ipsum dolor sit amet')
  29. })
  30. test('keeps track of line numbers', t => {
  31. t.plan(2)
  32. const lexer = new Lexer()
  33. let tokens = lexer.scan(`(
  34. (div :class "foobar"
  35. (p :class "bazquux"))
  36. `).tokens
  37. t.equal(tokens[2].line, 2)
  38. t.equal(tokens[7].line, 3)
  39. })
  40. test('multiple identifiers in a row are kept separate', t => {
  41. t.plan(2)
  42. const lexer = new Lexer()
  43. let tokens = lexer.scan(`(test test test)`).tokens
  44. t.deepEqual(tokens.map(token => token.type), [
  45. tt.OPAREN,
  46. tt.IDENTIFIER,
  47. tt.IDENTIFIER,
  48. tt.IDENTIFIER,
  49. tt.CPAREN,
  50. tt.EOF,
  51. ])
  52. tokens = lexer.scan(`(test "test" test test)`).tokens
  53. t.deepEqual(tokens.map(token => token.type), [
  54. tt.OPAREN,
  55. tt.IDENTIFIER,
  56. tt.QUOTE,
  57. tt.LITERAL,
  58. tt.QUOTE,
  59. tt.IDENTIFIER,
  60. tt.IDENTIFIER,
  61. tt.CPAREN,
  62. tt.EOF,
  63. ])
  64. })
  65. test('allow special characters inside quotes', t => {
  66. t.plan(2)
  67. const lexer = new Lexer()
  68. let tokens = lexer.scan(`
  69. (p "(test)")
  70. `).tokens
  71. t.deepEqual(tokens.map(token => token.type), [
  72. tt.OPAREN,
  73. tt.IDENTIFIER,
  74. tt.QUOTE,
  75. tt.LITERAL,
  76. tt.QUOTE,
  77. tt.CPAREN,
  78. tt.EOF,
  79. ])
  80. t.equal(tokens[3].value, '(test)')
  81. })
  82. test('lex numbers', t => {
  83. t.plan(2)
  84. const lexer = new Lexer()
  85. let tokens = lexer.scan('1 2 3 4 5').tokens
  86. t.deepEqual(tokens.map(token => token.type), [
  87. tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.EOF,
  88. ])
  89. t.deepEqual(tokens.map(token => token.value), [
  90. 1, 2, 3, 4, 5, undefined // undefined is for the EOF
  91. ])
  92. })
  93. test('lex booleans', t => {
  94. t.plan(2)
  95. const lexer = new Lexer()
  96. let tokens = lexer.scan('#t #f').tokens
  97. t.deepEqual(tokens.map(token => token.type), [
  98. tt.BOOLEAN, tt.BOOLEAN, tt.EOF
  99. ])
  100. t.deepEqual(tokens.map(token => token.value), [
  101. true, false, undefined // undefined is for the EOF
  102. ])
  103. })