Skip to content

Commit

Permalink
Add Lexer#stream()
Browse files Browse the repository at this point in the history
  • Loading branch information
nathan committed Mar 17, 2017
1 parent 7bbe566 commit f9ebf0c
Show file tree
Hide file tree
Showing 2 changed files with 84 additions and 0 deletions.
17 changes: 17 additions & 0 deletions moo.js
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,23 @@
this.reset()
}

if (typeof require !== 'undefined') {
  var Transform = require('stream').Transform
  var StringDecoder = require('string_decoder').StringDecoder

  /**
   * Create a Transform stream that feeds everything written to it into this
   * lexer and pushes the resulting tokens on the readable (object-mode) side.
   *
   * @param {*} [state] optional lexer state, forwarded to reset()
   * @returns {Transform} writable side accepts strings/Buffers; readable side
   *   emits token objects
   */
  Lexer.prototype.stream = function(state) {
    var self = this.reset('', state)
    // StringDecoder buffers partial multi-byte UTF-8 sequences, so a
    // character split across two chunks is decoded correctly instead of
    // being mangled by per-chunk Buffer#toString().
    var decoder = new StringDecoder('utf8')
    return new Transform({
      readableObjectMode: true,
      transform(chunk, encoding, cb) {
        self.feed(typeof chunk === 'string' ? chunk : decoder.write(chunk))
        var token
        while (token = self.next()) this.push(token)
        cb()
      },
      flush(cb) {
        // emit whatever bytes the decoder was still buffering at end-of-stream
        var tail = decoder.end()
        if (tail) {
          self.feed(tail)
          var token
          while (token = self.next()) this.push(token)
        }
        cb()
      }
    })
  }
}

Lexer.prototype.setState = function(state) {
if (!state || this.state === state) return
this.state = state
Expand Down
67 changes: 67 additions & 0 deletions test/test.js
Original file line number Diff line number Diff line change
Expand Up @@ -468,6 +468,73 @@ describe('errors', () => {
})


describe('streams', () => {
  const lexer = compile({
    word: /[a-z]+/,
    space: {match: /\s+/, lineBreaks: true},
  })
  const {Readable, Writable} = require('stream')

  // Input deliberately split across two chunks: the lexer stream must
  // tokenize the concatenation, not each chunk independently.
  const inputs = ['this is\n', 'a test']
  // Expected token stream for inputs joined together, in emission order.
  const tokens = [
    {type: 'word', value: 'this'},
    {type: 'space', value: ' '},
    {type: 'word', value: 'is'},
    {type: 'space', value: '\n'},
    {type: 'word', value: 'a'},
    {type: 'space', value: ' '},
    {type: 'word', value: 'test'},
  ]

  test('can be written and read', () => new Promise((resolve, reject) => {
    let index = 0
    // Fail the test if fewer tokens than expected are emitted.
    expect.assertions(tokens.length)

    const s = lexer.stream()
    s.write(inputs[0])
    s.end(inputs[1])

    s.on('data', tok => {
      // reject() rather than throw: an exception inside a 'data' handler
      // would be swallowed by the stream machinery, not fail the test.
      try {
        expect(tok).toMatchObject(tokens[index++])
      } catch (e) {reject(e)}
    })
    .on('error', reject)
    .on('end', resolve)
  }))

  test('can be piped to/from', () => new Promise((resolve, reject) => {
    let input = 0
    // Source stream that serves each chunk once, then EOF (push(null)).
    const rs = new Readable({
      read() {
        try {
          this.push(input < inputs.length ?
            Buffer.from(inputs[input++], 'ascii') : null)
        } catch (e) {reject(e)}
      }
    })

    let index = 0
    expect.assertions(tokens.length)
    // Object-mode sink that checks each token as it arrives; errors are
    // passed to cb() so they surface as 'error' on the pipeline.
    const ws = new Writable({
      objectMode: true,
      write(tok, _, cb) {
        try {
          expect(tok).toMatchObject(tokens[index++])
          cb()
        } catch (e) {cb(e)}
      }
    })

    rs
    .on('error', reject).pipe(lexer.stream())
    .on('error', reject).pipe(ws)
    .on('error', reject)
    .on('finish', resolve)
  }))
})


describe('python tokenizer', () => {

test("1 + 2", () => {
Expand Down

0 comments on commit f9ebf0c

Please sign in to comment.