- const test = require('tape')
-
- const Lexer = require('../src/lexer')
- const AST = require('../src/ast/index')
- const Parser = require('../src/parser')
- const tt = require('../src/tokenTypes')
-
// Verifies that a nested s-expression is parsed into the expected AST:
// a (div ...) application whose :class attribute is a plain string, and
// whose child (p ...) has a :class attribute that is itself a (cond ...)
// application.
test('parses token stream into a tree', t => {
  t.plan(1)
  const lexer = new Lexer()
  // `const` throughout — none of these bindings are reassigned (keeps this
  // test consistent with the others in this file).
  const tokenStream = lexer.scan(`
    (div :class "foobar"
      (p :class (cond #t "primary" "secondary")))
  `)
  const parser = new Parser(tokenStream)
  const tree = parser.parse()

  t.deepEqual(tree, [
    new AST.Application({
      functionName: new AST.Identifier({ name: 'div' }),
      args: [
        new AST.Attribute({
          name: 'class',
          value: new AST.String({ value: 'foobar' })
        }),
        new AST.Application({
          functionName: new AST.Identifier({ name: 'p' }),
          args: [
            new AST.Attribute({
              name: 'class',
              // #t lexes to a Boolean node; cond's branches stay in order.
              value: new AST.Application({
                functionName: new AST.Identifier({ name: 'cond' }),
                args: [
                  new AST.Boolean({ value: true }),
                  new AST.String({ value: 'primary' }),
                  new AST.String({ value: 'secondary' }),
                ]
              })
            })
          ]
        })
      ]
    })
  ])
})
-
// An empty string literal ("") must survive lexing and parsing as an
// AST.String with a zero-length value rather than being dropped.
test('allow empty strings', t => {
  t.plan(1)
  const tokens = new Lexer().scan('(p "")')
  const ast = new Parser(tokens).parse()

  const expected = [
    new AST.Application({
      functionName: new AST.Identifier({ name: 'p' }),
      args: [new AST.String({ value: '' })]
    })
  ]
  t.deepEqual(ast, expected)
})