Skip to content
This repository was archived by the owner on Sep 9, 2024. It is now read-only.

Commit 9f3f103

Browse files
committed
Encoder NodeJS stream support
1 parent 1a6aae9 commit 9f3f103

File tree

3 files changed

+95
-59
lines changed

3 files changed

+95
-59
lines changed

TODO.md

+2-1
Original file line number | Diff line number | Diff line change
@@ -1,2 +1,3 @@
11
* LZ4 stream dictionary
2-
* NodeJS streams updates
2+
* NodeJS streams updates
3+
* dependent blocks compression

lib/encoder.js

+45-20
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,6 @@ function LZ4Stream (options) {
3636
this.options = o
3737

3838
this.compress = o.highCompression ? lz4_binding.compressHCLimited : lz4_binding.compressLimited
39-
this.chunkBound = lz4_binding.compressBound(o.blockMaxSize)
4039

4140
// Build the stream descriptor from the options
4241
// flags
@@ -66,18 +65,23 @@ function LZ4Stream (options) {
6665
}
6766

6867
// Add data to the stream, splitting blocks according to blockMaxSize
69-
LZ4Stream.prototype.push = function (data) {
68+
LZ4Stream.prototype.add = function (data) {
7069
if (!data) return
7170

7271
for (var size, i = 0, n = data.length; i < n; i += size) {
7372
size = Math.min(n - i, this.options.blockMaxSize)
74-
this._push( data.slice(i, i + size) )
73+
this._add( data.slice(i, i + size) )
7574
}
7675
}
7776

77+
// Shift a block
78+
LZ4Stream.prototype.shiftBlock = function () {
79+
return this.blocks.shift()
80+
}
81+
7882
// Compress and add a data block to the stream
7983
// The block is uncompressed if it is bigger than blockMaxSize
80-
LZ4Stream.prototype._push = function (data) {
84+
LZ4Stream.prototype._add = function (data) {
8185
if (!data) return
8286

8387
var compressed = new Buffer( data.length )
@@ -114,10 +118,14 @@ LZ4Stream.prototype._push = function (data) {
114118
this.size += data.length
115119
}
116120

117-
LZ4Stream.prototype.flush = function () {
118-
var res = [ lz4_static.MAGICNUMBER_BUFFER ]
119-
// Allocate maximum descriptor size...
120-
var descriptor = new Buffer(15)
121+
LZ4Stream.prototype.header = function () {
122+
// Allocate magic number + maximum descriptor size
123+
var magicSize = 4
124+
var res = new Buffer(magicSize + 15)
125+
126+
res.writeUInt32LE(lz4_static.MAGICNUMBER, 0, false)
127+
128+
var descriptor = res.slice(magicSize)
121129
var descriptorLength = 3
122130

123131
// Update the stream descriptor
@@ -143,22 +151,37 @@ LZ4Stream.prototype.flush = function () {
143151
, descriptorLength - 1, false
144152
)
145153

146-
// ...then slice it accordingly
154+
// Adjust size according to descriptor length
147155
if (descriptorLength < descriptor.length)
148-
descriptor = descriptor.slice(0, descriptorLength)
149-
150-
res.push(descriptor)
156+
res = res.slice(0, magicSize + descriptorLength)
151157

152-
// Add compressed blocks
153-
res.push.apply(res, this.blocks)
158+
return res
159+
}
154160

155-
res.push(lz4_static.EOS_BUFFER)
161+
LZ4Stream.prototype.tail = function () {
162+
var eosSize = 4
156163

157164
if (this.options.streamChecksum) {
158-
var checksum = new Buffer(4)
159-
checksum.writeUInt32LE( utils.streamChecksum(null, this.checksum), 0, false )
160-
res.push( checksum )
165+
var res = new Buffer(eosSize + 4)
166+
res.writeUInt32LE( utils.streamChecksum(null, this.checksum), eosSize, false )
167+
} else {
168+
var res = new Buffer(eosSize)
161169
}
170+
171+
res.writeUInt32LE(lz4_static.EOS, 0, false)
172+
173+
return res
174+
}
175+
176+
LZ4Stream.prototype.done = function () {
177+
var res = []
178+
179+
res.push( this.header() )
180+
181+
// Add compressed blocks
182+
res.push.apply(res, this.blocks)
183+
184+
res.push( this.tail() )
162185

163186
return Buffer.concat(res)
164187
}
@@ -172,6 +195,8 @@ function integerBytesLength (i) {
172195

173196
exports.LZ4_compress = function (input, options) {
174197
var LZ4S = new LZ4Stream(options)
175-
LZ4S.push(input)
176-
return LZ4S.flush()
198+
LZ4S.add(input)
199+
return LZ4S.done()
177200
}
201+
202+
exports.LZ4Stream = LZ4Stream

lib/encoder_stream.js

+48-38
Original file line number | Diff line number | Diff line change
@@ -1,76 +1,86 @@
11
var Transform = require('stream').Transform
22
var inherits = require('util').inherits
33

4-
var encoder = require('./encoder')
4+
var LZ4Stream = require('./encoder').LZ4Stream
55
var lz4_static = require('./static')
66

7-
var LZ4_compressChunk = encoder.LZ4_compressChunk
8-
var LZ4_compressHCChunk = encoder.LZ4_compressHCChunk
9-
var LZ4_compressBound = encoder.LZ4_compressBound
10-
117
/**
12-
Build up chunks and encode them one by one as they are assembled
8+
Build up blocks and encode them one by one as they are assembled
139
*/
1410
function Encoder (options) {
1511
if ( !(this instanceof Encoder) )
1612
return new Encoder(options)
1713

1814
Transform.call(this, options)
19-
// Options
20-
options = options || {}
21-
this.chunkSize = options.chunkSize || lz4_static.DEFAULT_CHUNKSIZE
22-
this.compress = options.hc ? LZ4_compressHCChunk : LZ4_compressChunk
15+
LZ4Stream.call(this, options)
2316

2417
// Data being processed
2518
this.buffer = []
2619
this.length = 0
2720

2821
this.first = true
29-
this.chunkBound = LZ4_compressBound(this.chunkSize) + 4
22+
this.fast = !this.options.streamSize
3023
}
3124
inherits(Encoder, Transform)
25+
Object.keys(LZ4Stream.prototype).forEach(function(method) {
26+
if (!Encoder.prototype[method])
27+
Encoder.prototype[method] = LZ4Stream.prototype[method];
28+
});
3229

3330
Encoder.prototype._transform = function (data, encoding, done) {
3431
// Buffer the incoming data
3532
this.buffer.push(data)
3633
this.length += data.length
3734

38-
if (this.first) {
39-
var buf = new Buffer(4)
40-
buf.writeUInt32LE(lz4_static.ARCHIVE_MAGICNUMBER, 0, false)
41-
this.push(buf)
42-
this.first = false
43-
}
35+
var blockMaxSize = this.options.blockMaxSize
4436

45-
if ( this.length < this.chunkSize ) return done()
4637

47-
this._compressChunk(this.chunkSize, done)
48-
}
38+
// Not enough data for a block
39+
if ( this.length < blockMaxSize ) return done()
4940

50-
Encoder.prototype._flush = function (done) {
51-
if (this.length === 0) return done()
41+
// Build the data to be compressed
42+
var buf = Buffer.concat(this.buffer, this.length)
5243

53-
while ( this.length > this.chunkSize ) {
54-
this._compressChunk(this.chunkSize)
55-
}
44+
// Compress the block
45+
this.add( buf.slice(0, blockMaxSize) )
5646

57-
this._compressChunk(this.length, done)
58-
}
47+
// Set the remaining data
48+
if (buf.length > blockMaxSize) {
49+
this.buffer = []
50+
this.length = 0
51+
} else {
52+
this.buffer = [ buf.slice(blockMaxSize) ]
53+
this.length = buf.length - blockMaxSize
54+
}
5955

60-
Encoder.prototype._compressChunk = function (size, done) {
61-
var buf = new Buffer(this.chunkBound)
62-
var input = Buffer.concat(this.buffer, this.length)
63-
var res = this.compress( input.slice(0, size), buf.slice(4) )
64-
if (res === 0)
65-
return done( null, new Error('Compression error') )
56+
if (this.fast) {
57+
if (this.first) {
58+
this.push( this.header() )
59+
this.first = false
60+
}
61+
this.push( this.shiftBlock() )
62+
}
6663

67-
buf.writeUInt32LE(res, 0, false)
68-
this.push( buf.slice(0, res + 4) )
64+
done()
65+
}
6966

70-
this.length = input.length - size
71-
this.buffer = this.length > 0 ? [ input.slice(size) ] : []
67+
Encoder.prototype._flush = function (done) {
68+
if (this.length > 0)
69+
this.add( Buffer.concat(this.buffer, this.length) )
70+
71+
if (this.fast) {
72+
if (this.first) {
73+
this.push( this.header() )
74+
this.first = false
75+
}
76+
var block
77+
while ( block = this.shiftBlock() ) this.push(block)
78+
this.push( this.tail() )
79+
} else {
80+
this.push( this.done() )
81+
}
7282

73-
if (done) done()
83+
done()
7484
}
7585

7686
module.exports = Encoder

0 commit comments

Comments (0)