/*
 * Auntie#dist example: it loads (ASYNC) a file containing 8192 long English
 * words, separated by the CRLF '\r\n' sequence.
 * To "mess" things up, the stream chunk size is reduced to 1 byte below.
 */
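/*
 * A minimal sketch of the result layout, as inferred from the logs at the
 * bottom of this file (not from the library docs): each call to untie.dist()
 * appears to return an array shaped like
 *   [ total matches, min token length, max token length, remaining bytes ]
 * e.g. (hypothetical values):
 *   Auntie( '\r\n' ).dist( Buffer.from( 'foo\r\nlonger\r\ntail' ) )
 *   // -> [ 2, 3, 6, 4 ]
 */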
const log = console.log
    , fs = require( 'fs' )
    , Auntie = require( '../' )
    , path = __dirname + '/data/some-english-words-seq.txt'
    , pattern = '\r\n'
    // the default pattern is '\r\n'
    , untie = Auntie( pattern )
    // create an async read stream
    , rstream = fs.createReadStream( path )
    ;
log( '\n- Auntie#dist example, load English long words from a file in ASYNC way:\n "%s"\n', path );
// uncomment the log lines below to print the current stream chunk size
// log( '- current highWaterMark value for stream: %d bytes', rstream._readableState.highWaterMark );
// voluntarily reduce the chunk buffer size to 1 byte
rstream._readableState.highWaterMark = 1;
// log( '- new highWaterMark value for stream: %d bytes', rstream._readableState.highWaterMark );
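// NOTE: _readableState is a private Node.js internal; the supported way to
// shrink the chunk size is the public option of fs.createReadStream, e.g.
// fs.createReadStream( path, { highWaterMark: 1 } ).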
log( '- sequence to parse is "\\r\\n" ->', untie.seq );
log( '- starting to parse the data stream..' );
log( '- counting occurrences in the data stream..' );
let chunks = 0
    , tot = 0
    , result = null
    ;
rstream.on( 'data', function ( chunk ) {
    ++chunks;
    tot += chunk.length;
    log();
    log( 'c:', chunk );
    result = untie.dist( chunk );
    log( 'r:', result );
    log();
} );
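// NOTE (inferred from the 'close' handler below, not from the library docs):
// untie.dist() appears to accumulate its distribution across calls, so the
// last result holds the running totals for the whole stream.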
rstream.on( 'end', function () {
    log( '- !end stream' );
} );
rstream.on( 'close', function () {
    log( '- !close stream' );
    log( '\n- total data length: %d bytes', tot );
    log( '- total matches: %d (lines)', result[ 0 ] );
    log( '\n- min length: %d bytes', result[ 1 ] );
    log( '- max length: %d bytes', result[ 2 ] );
    log( '- remaining: %d bytes\n', result[ 3 ] );
} );
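// To try it (assuming the repo layout this example ships with, i.e. the
// module at '../' and the word list under './data'):
//   node auntie-dist-async-stream-example.js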