Skip to content

Commit bfb2cd0

Browse files
JacksonTian authored and
jasnell committed
stream: add bytesRead property for readable
Adding a bytesRead property to readable streams is useful in some use cases. When users want to know how many bytes a readable stream has read, they need to calculate it in userland. If an encoding is specified, getting the value is very slow. PR-URL: #4372 Reviewed-By: Trevor Norris <[email protected]> Reviewed-By: Colin Ihrig <[email protected]> Reviewed-By: James M Snell <[email protected]>
1 parent ebd9add commit bfb2cd0

File tree

4 files changed

+127
-5
lines changed

4 files changed

+127
-5
lines changed

doc/api/stream.markdown

+5
Original file line numberDiff line numberDiff line change
@@ -250,6 +250,11 @@ readable: null
250250
end
251251
```
252252

253+
254+
#### readable.bytesRead
255+
256+
The number of bytes read so far. If `objectMode` is `true`, the value is always 0.
257+
253258
#### readable.isPaused()
254259

255260
* Return: `Boolean`

lib/_stream_readable.js

+3
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,8 @@ function Readable(options) {
8383

8484
this._readableState = new ReadableState(options, this);
8585

86+
this.bytesRead = 0;
87+
8688
// legacy
8789
this.readable = true;
8890

@@ -135,6 +137,7 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
135137
var e = new Error('stream.unshift() after end event');
136138
stream.emit('error', e);
137139
} else {
140+
stream.bytesRead += state.objectMode ? 0 : chunk.length;
138141
if (state.decoder && !addToFront && !encoding)
139142
chunk = state.decoder.write(chunk);
140143

lib/net.js

-5
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,6 @@ exports._normalizeConnectArgs = normalizeConnectArgs;
9191
// called when creating new Socket, or when re-using a closed Socket
9292
function initSocketHandle(self) {
9393
self.destroyed = false;
94-
self.bytesRead = 0;
9594
self._bytesDispatched = 0;
9695
self._sockname = null;
9796

@@ -515,10 +514,6 @@ function onread(nread, buffer) {
515514
// will prevent this from being called again until _read() gets
516515
// called again.
517516

518-
// if it's not enough data, we'll just call handle.readStart()
519-
// again right away.
520-
self.bytesRead += nread;
521-
522517
// Optimization: emit the original buffer with end points
523518
var ret = self.push(buffer);
524519

Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
'use strict';

// Verifies the `bytesRead` property on readable streams:
//  - it accumulates the byte length of every chunk pushed to the readable
//    side, regardless of any encoding set by the consumer;
//  - it stays 0 when the readable side is in object mode.

require('../common');
const assert = require('assert');
const Readable = require('stream').Readable;
const Duplex = require('stream').Duplex;
const Transform = require('stream').Transform;

// Case 1: binary chunks — bytesRead equals the summed chunk lengths.
(function() {
  const readable = new Readable({
    read: function(n) {
      const i = this._index++;
      if (i > this._max)
        this.push(null);
      else
        // Buffer.from(): the `new Buffer()` constructor is deprecated.
        this.push(Buffer.from('a'));
    }
  });

  readable._max = 1000;
  readable._index = 1;

  let total = 0;
  readable.on('data', function(chunk) {
    total += chunk.length;
  });

  readable.on('end', function() {
    assert.strictEqual(total, readable.bytesRead);
  });
})();

// Case 2: an encoding is set — bytesRead must still count the raw bytes
// pushed, not the decoded string length (they coincide here because the
// payload is ASCII).
(function() {
  const readable = new Readable({
    read: function(n) {
      const i = this._index++;
      if (i > this._max)
        this.push(null);
      else
        this.push(Buffer.from('a'));
    }
  });

  readable._max = 1000;
  readable._index = 1;

  let total = 0;
  readable.setEncoding('utf8');
  readable.on('data', function(chunk) {
    total += Buffer.byteLength(chunk);
  });

  readable.on('end', function() {
    assert.strictEqual(total, readable.bytesRead);
  });
})();

// Case 3: Duplex — bytesRead tracks the readable side only.
(function() {
  const duplex = new Duplex({
    read: function(n) {
      const i = this._index++;
      if (i > this._max)
        this.push(null);
      else
        this.push(Buffer.from('a'));
    },
    write: function(chunk, encoding, next) {
      next();
    }
  });

  duplex._max = 1000;
  duplex._index = 1;

  let total = 0;
  duplex.setEncoding('utf8');
  duplex.on('data', function(chunk) {
    total += Buffer.byteLength(chunk);
  });

  duplex.on('end', function() {
    assert.strictEqual(total, duplex.bytesRead);
  });
})();

// Case 4: a Transform whose readable side is in object mode must report
// bytesRead === 0, while the upstream binary Readable counts normally.
(function() {
  const readable = new Readable({
    read: function(n) {
      const i = this._index++;
      if (i > this._max)
        this.push(null);
      else
        this.push(Buffer.from('{"key":"value"}'));
    }
  });
  readable._max = 1000;
  readable._index = 1;

  const transform = new Transform({
    readableObjectMode: true,
    transform: function(chunk, encoding, next) {
      next(null, JSON.parse(chunk));
    },
    flush: function(done) {
      done();
    }
  });

  let total = 0;
  readable.on('data', function(chunk) {
    total += chunk.length;
  });

  transform.on('end', function() {
    assert.strictEqual(0, transform.bytesRead);
    assert.strictEqual(total, readable.bytesRead);
  });
  readable.pipe(transform);
  // Consume the transform's readable side; without this, object-mode
  // backpressure stalls the pipeline after highWaterMark objects and the
  // 'end' assertions above would never run.
  transform.resume();
})();

0 commit comments

Comments
 (0)