const test = require('tape')

const Compiler = require('../src/compiler')
const Lexer = require('../src/lexer')
const Node = require('../src/node')
const Parser = require('../src/parser')
const tt = require('../src/tokenTypes')

- test('compiles a simple template', t => {
- t.plan(1)
- const lexer = new Lexer()
- const tokenStream = lexer.scan(`
- (div :class "foobar"
- (p :class "bazquux" "Lorem ipsum dolor sit amet."))
- `)
- const parser = new Parser(tokenStream)
- const tree = parser.parse()
- const compiler = new Compiler(tree)
- const result = compiler.compile()
- t.deepEqual(
- result.replace(/\n/g, '').replace(/ +/g, ''),
- '<div class="foobar"><p class="bazquux">Lorem ipsum dolor sit amet.</p></div>',
- )
- })
-
- test('renders variables according to passed-in context', t => {
- t.plan(1)
- const lexer = new Lexer()
- const tokenStream = lexer.scan(`
- (div :class classOne
- (p :class classTwo bodyText))
- `)
- const parser = new Parser(tokenStream)
- const tree = parser.parse()
- const compiler = new Compiler(tree, {
- classOne: 'foobar',
- classTwo: 'bazquux',
- bodyText: 'Lorem ipsum dolor sit amet.',
- })
- const result = compiler.compile()
- t.deepEqual(
- result.replace(/\n/g, '').replace(/ +/g, ''),
- '<div class="foobar"><p class="bazquux">Lorem ipsum dolor sit amet.</p></div>',
- )
- })
-
- test('compiles map operations', function(t) {
- t.plan(1)
- const lexer = new Lexer()
- const tokenStream = lexer.scan(`
- (ul
- (each items 'item (li item)))
- `)
- const parser = new Parser(tokenStream)
- const tree = parser.parse()
- const compiler = new Compiler(tree, {
- items: ['one', 'two', 'three'],
- })
- const result = compiler.compile()
- t.deepEqual(
- result.replace(/\n/g, '').replace(/ +/g, ''),
- '<ul><li>one</li><li>two</li><li>three</li></ul>',
- )
- })
-
- test('self closing tags are respected', function(t) {
- t.plan(1)
- const lexer = new Lexer()
- const tokenStream = lexer.scan(`
- (meta :charset "UTF-8")
- (img :src "test.png")
- `)
- const parser = new Parser(tokenStream)
- const tree = parser.parse()
- const compiler = new Compiler(tree)
- const result = compiler.compile()
- t.deepEqual(
- result.replace(/\n/g, '').replace(/ +/g, ''),
- '<meta charset="UTF-8"><img src="test.png">',
- )
- })
|