- const test = require('tape')
-
- const Lexer = require('../src/lexer')
- const tt = require('../src/tokenTypes')
-
// A one-line template should tokenize into the expected token stream, and
// identifier/attribute/literal tokens should preserve their raw values.
test('lexes simple template correctly', t => {
  t.plan(4)
  const lexer = new Lexer()
  const { tokens } = lexer.scan(
    '(div :class "foobar" (p "Lorem ipsum dolor sit amet"))',
  )
  t.deepEqual(tokens.map(token => token.type), [
    tt.OPAREN,
    tt.IDENTIFIER,
    tt.ATTRIBUTE,
    tt.QUOTE,
    tt.LITERAL,
    tt.QUOTE,
    tt.OPAREN,
    tt.IDENTIFIER,
    tt.QUOTE,
    tt.LITERAL,
    tt.QUOTE,
    tt.CPAREN,
    tt.CPAREN,
    tt.EOF,
  ])
  // Spot-check token values at known positions in the stream above:
  // [1] = `div` identifier, [2] = `:class` attribute, [9] = inner literal.
  t.equal(tokens[1].value, 'div')
  t.equal(tokens[2].value, 'class')
  t.equal(tokens[9].value, 'Lorem ipsum dolor sit amet')
})
-
// Tokens produced past the first newline must carry the correct 1-based
// line number.
test('keeps track of line numbers', t => {
  t.plan(2)
  const lexer = new Lexer()
  const { tokens } = lexer.scan(`(
(div :class "foobar"
(p :class "bazquux"))
`)
  t.equal(tokens[2].line, 2) // `div` identifier sits on line 2
  t.equal(tokens[7].line, 3) // OPAREN of `(p ...)` sits on line 3
})
-
// Adjacent identifiers must each produce their own IDENTIFIER token rather
// than being merged — both with and without an intervening quoted literal.
test('multiple identifiers in a row are kept separate', t => {
  t.plan(2)
  const lexer = new Lexer()
  const bare = lexer.scan(`(test test test)`).tokens
  t.deepEqual(bare.map(tok => tok.type), [
    tt.OPAREN,
    tt.IDENTIFIER,
    tt.IDENTIFIER,
    tt.IDENTIFIER,
    tt.CPAREN,
    tt.EOF,
  ])
  const mixed = lexer.scan(`(test "test" test test)`).tokens
  t.deepEqual(mixed.map(tok => tok.type), [
    tt.OPAREN,
    tt.IDENTIFIER,
    tt.QUOTE,
    tt.LITERAL,
    tt.QUOTE,
    tt.IDENTIFIER,
    tt.IDENTIFIER,
    tt.CPAREN,
    tt.EOF,
  ])
})
-
// Characters that are meaningful to the lexer (parens here) must be treated
// as plain text when they appear inside a quoted literal.
test('allow special characters inside quotes', t => {
  t.plan(2)
  const lexer = new Lexer()
  const { tokens } = lexer.scan(`
(p "(test)")
`)
  t.deepEqual(tokens.map(token => token.type), [
    tt.OPAREN,
    tt.IDENTIFIER,
    tt.QUOTE,
    tt.LITERAL,
    tt.QUOTE,
    tt.CPAREN,
    tt.EOF,
  ])
  // The literal keeps the parens verbatim instead of tokenizing them.
  t.equal(tokens[3].value, '(test)')
})
-
// Bare digit sequences should lex as NUMBER tokens carrying numeric (not
// string) values.
test('lex numbers', t => {
  t.plan(2)
  const lexer = new Lexer()
  const { tokens } = lexer.scan('1 2 3 4 5')
  t.deepEqual(tokens.map(token => token.type), [
    tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.NUMBER, tt.EOF,
  ])
  t.deepEqual(tokens.map(token => token.value), [
    1, 2, 3, 4, 5, undefined, // undefined is for the EOF
  ])
})
-
// `#t` / `#f` should lex as BOOLEAN tokens carrying real boolean values.
test('lex booleans', t => {
  t.plan(2)
  const lexer = new Lexer()
  const { tokens } = lexer.scan('#t #f')
  t.deepEqual(tokens.map(token => token.type), [
    tt.BOOLEAN, tt.BOOLEAN, tt.EOF,
  ])
  t.deepEqual(tokens.map(token => token.value), [
    true, false, undefined, // undefined is for the EOF
  ])
})
|