@@ -115,20 +115,20 @@ that implements an HTTP server:
115
115
const http = require (' http' );
116
116
117
117
const server = http .createServer ((req , res ) => {
118
- // `req` is an http.IncomingMessage, which is a Readable Stream
119
- // `res` is an http.ServerResponse, which is a Writable Stream
118
+ // `req` is an http.IncomingMessage, which is a Readable Stream.
119
+ // `res` is an http.ServerResponse, which is a Writable Stream.
120
120
121
121
let body = ' ' ;
122
122
// Get the data as utf8 strings.
123
123
// If an encoding is not set, Buffer objects will be received.
124
124
req .setEncoding (' utf8' );
125
125
126
- // Readable streams emit 'data' events once a listener is added
126
+ // Readable streams emit 'data' events once a listener is added.
127
127
req .on (' data' , (chunk ) => {
128
128
body += chunk;
129
129
});
130
130
131
- // The 'end' event indicates that the entire body has been received
131
+ // The 'end' event indicates that the entire body has been received.
132
132
req .on (' end' , () => {
133
133
try {
134
134
const data = JSON .parse (body);
@@ -250,7 +250,7 @@ function writeOneMillionTimes(writer, data, encoding, callback) {
250
250
do {
251
251
i-- ;
252
252
if (i === 0 ) {
253
- // last time!
253
+ // Last time!
254
254
writer .write (data, encoding, callback);
255
255
} else {
256
256
// See if we should continue, or wait.
@@ -259,8 +259,8 @@ function writeOneMillionTimes(writer, data, encoding, callback) {
259
259
}
260
260
} while (i > 0 && ok);
261
261
if (i > 0 ) {
262
- // had to stop early!
263
- // write some more once it drains
262
+ // Had to stop early!
263
+ // Write some more once it drains.
264
264
writer .once (' drain' , write);
265
265
}
266
266
}
@@ -410,7 +410,7 @@ Calling the [`stream.write()`][stream-write] method after calling
410
410
[ ` stream.end() ` ] [ stream-end ] will raise an error.
411
411
412
412
``` js
413
- // Write 'hello, ' and then end with 'world!'
413
+ // Write 'hello, ' and then end with 'world!'.
414
414
const fs = require (' fs' );
415
415
const file = fs .createWriteStream (' example.txt' );
416
416
file .write (' hello, ' );
@@ -480,6 +480,15 @@ added: v11.4.0
480
480
481
481
Is ` true ` if it is safe to call [ ` writable.write() ` ] [ stream-write ] .
482
482
483
+ ##### writable.writableFinished
484
+ <!-- YAML
485
+ added: v12.6.0
486
+ -->
487
+
488
+ * {boolean}
489
+
490
+ Is ` true ` after the [ ` 'finish' ` ] [ ] event has been emitted.
491
+
483
492
##### writable.writableHighWaterMark
484
493
<!-- YAML
485
494
added: v9.3.0
@@ -499,16 +508,6 @@ This property contains the number of bytes (or objects) in the queue
499
508
ready to be written. The value provides introspection data regarding
500
509
the status of the ` highWaterMark ` .
501
510
502
- ##### writable.writableFinished
503
- <!-- YAML
504
- added: v12.6.0
505
- -->
506
-
507
- * {boolean}
508
-
509
- Is ` true ` if all data has been flushed to the underlying system. After
510
- the [ ` 'finish' ` ] [ ] event has been emitted.
511
-
512
511
##### writable.writableObjectMode
513
512
<!-- YAML
514
513
added: v12.3.0
@@ -694,11 +693,11 @@ const writable = new Writable();
694
693
695
694
pass .pipe (writable);
696
695
pass .unpipe (writable);
697
- // readableFlowing is now false
696
+ // readableFlowing is now false.
698
697
699
698
pass .on (' data' , (chunk ) => { console .log (chunk .toString ()); });
700
- pass .write (' ok' ); // Will not emit 'data'
701
- pass .resume (); // Must be called to make stream emit 'data'
699
+ pass .write (' ok' ); // Will not emit 'data'.
700
+ pass .resume (); // Must be called to make stream emit 'data'.
702
701
```
703
702
704
703
While ` readable.readableFlowing ` is ` false ` , data may be accumulating
@@ -841,7 +840,7 @@ cause some amount of data to be read into an internal buffer.
841
840
``` javascript
842
841
const readable = getReadableStreamSomehow ();
843
842
readable .on (' readable' , function () {
844
- // There is some data to read now
843
+ // There is some data to read now.
845
844
let data;
846
845
847
846
while (data = this .read ()) {
@@ -986,7 +985,7 @@ named `file.txt`:
986
985
const fs = require (' fs' );
987
986
const readable = getReadableStreamSomehow ();
988
987
const writable = fs .createWriteStream (' file.txt' );
989
- // All the data from readable goes into 'file.txt'
988
+ // All the data from readable goes into 'file.txt'.
990
989
readable .pipe (writable);
991
990
```
992
991
It is possible to attach multiple ` Writable ` streams to a single ` Readable `
@@ -1061,7 +1060,7 @@ readable.on('readable', () => {
1061
1060
1062
1061
The ` while ` loop is necessary when processing data with
1063
1062
` readable.read() ` . Only after ` readable.read() ` returns ` null ` ,
1064
- [ ` 'readable' ` ] ( ) will be emitted.
1063
+ [ ` 'readable' ` ] [ ] will be emitted.
1065
1064
1066
1065
A ` Readable ` stream in object mode will always return a single item from
1067
1066
a call to [ ` readable.read(size) ` ] [ stream-read ] , regardless of the value of the
@@ -1192,7 +1191,7 @@ const fs = require('fs');
1192
1191
const readable = getReadableStreamSomehow ();
1193
1192
const writable = fs .createWriteStream (' file.txt' );
1194
1193
// All the data from readable goes into 'file.txt',
1195
- // but only for the first second
1194
+ // but only for the first second.
1196
1195
readable .pipe (writable);
1197
1196
setTimeout (() => {
1198
1197
console .log (' Stop writing to file.txt.' );
@@ -1231,9 +1230,9 @@ use of a [`Transform`][] stream instead. See the [API for Stream Implementers][]
1231
1230
section for more information.
1232
1231
1233
1232
``` js
1234
- // Pull off a header delimited by \n\n
1235
- // use unshift() if we get too much
1236
- // Call the callback with (error, header, stream)
1233
+ // Pull off a header delimited by \n\n.
1234
+ // Use unshift() if we get too much.
1235
+ // Call the callback with (error, header, stream).
1237
1236
const { StringDecoder } = require (' string_decoder' );
1238
1237
function parseHeader (stream , callback ) {
1239
1238
stream .on (' error' , callback);
@@ -1245,13 +1244,13 @@ function parseHeader(stream, callback) {
1245
1244
while (null !== (chunk = stream .read ())) {
1246
1245
const str = decoder .write (chunk);
1247
1246
if (str .match (/ \n\n / )) {
1248
- // Found the header boundary
1247
+ // Found the header boundary.
1249
1248
const split = str .split (/ \n\n / );
1250
1249
header += split .shift ();
1251
1250
const remaining = split .join (' \n\n ' );
1252
1251
const buf = Buffer .from (remaining, ' utf8' );
1253
1252
stream .removeListener (' error' , callback);
1254
- // Remove the 'readable' listener before unshifting
1253
+ // Remove the 'readable' listener before unshifting.
1255
1254
stream .removeListener (' readable' , onReadable);
1256
1255
if (buf .length )
1257
1256
stream .unshift (buf);
@@ -1323,13 +1322,13 @@ const fs = require('fs');
1323
1322
async function print (readable ) {
1324
1323
readable .setEncoding (' utf8' );
1325
1324
let data = ' ' ;
1326
- for await (const k of readable ) {
1327
- data += k ;
1325
+ for await (const chunk of readable ) {
1326
+ data += chunk ;
1328
1327
}
1329
1328
console .log (data);
1330
1329
}
1331
1330
1332
- print (fs .createReadStream (' file' )).catch (console .log );
1331
+ print (fs .createReadStream (' file' )).catch (console .error );
1333
1332
```
1334
1333
1335
1334
If the loop terminates with a ` break ` or a ` throw ` , the stream will be
@@ -1425,7 +1424,7 @@ finished(rs, (err) => {
1425
1424
}
1426
1425
});
1427
1426
1428
- rs .resume (); // drain the stream
1427
+ rs .resume (); // Drain the stream.
1429
1428
```
1430
1429
1431
1430
Especially useful in error handling scenarios where a stream is destroyed
@@ -1445,7 +1444,7 @@ async function run() {
1445
1444
}
1446
1445
1447
1446
run ().catch (console .error );
1448
- rs .resume (); // drain the stream
1447
+ rs .resume (); // Drain the stream.
1449
1448
```
1450
1449
1451
1450
### stream.pipeline(...streams, callback)
@@ -1508,6 +1507,7 @@ run().catch(console.error);
1508
1507
* ` options ` {Object} Options provided to ` new stream.Readable([options]) ` .
1509
1508
By default, ` Readable.from() ` will set ` options.objectMode ` to ` true ` , unless
1510
1509
this is explicitly opted out by setting ` options.objectMode ` to ` false ` .
1510
+ * Returns: {stream.Readable}
1511
1511
1512
1512
A utility method for creating Readable Streams out of iterators.
1513
1513
@@ -1555,10 +1555,10 @@ on the type of stream being created, as detailed in the chart below:
1555
1555
1556
1556
| Use-case | Class | Method(s) to implement |
1557
1557
| -------- | ----- | ---------------------- |
1558
- | Reading only | [ ` Readable ` ] | <code >[ _ read] [ stream-_read ] </code > |
1559
- | Writing only | [ ` Writable ` ] | <code >[ _ write] [ stream-_write ] </code >, <code >[ _ writev] [ stream-_writev ] </code >, <code >[ _ final] [ stream-_final ] </code > |
1560
- | Reading and writing | [ ` Duplex ` ] | <code >[ _ read] [ stream-_read ] </code >, <code >[ _ write] [ stream-_write ] </code >, <code >[ _ writev] [ stream-_writev ] </code >, <code >[ _ final] [ stream-_final ] </code > |
1561
- | Operate on written data, then read the result | [ ` Transform ` ] | <code >[ _ transform] [ stream-_transform ] </code >, <code >[ _ flush] [ stream-_flush ] </code >, <code >[ _ final] [ stream-_final ] </code > |
1558
+ | Reading only | [ ` Readable ` ] | <code >[ _ read() ] [ stream-_read ] </code > |
1559
+ | Writing only | [ ` Writable ` ] | <code >[ _ write() ] [ stream-_write ] </code >, <code >[ _ writev() ] [ stream-_writev ] </code >, <code >[ _ final() ] [ stream-_final ] </code > |
1560
+ | Reading and writing | [ ` Duplex ` ] | <code >[ _ read() ] [ stream-_read ] </code >, <code >[ _ write() ] [ stream-_write ] </code >, <code >[ _ writev() ] [ stream-_writev ] </code >, <code >[ _ final() ] [ stream-_final ] </code > |
1561
+ | Operate on written data, then read the result | [ ` Transform ` ] | <code >[ _ transform() ] [ stream-_transform ] </code >, <code >[ _ flush() ] [ stream-_flush ] </code >, <code >[ _ final() ] [ stream-_final ] </code > |
1562
1562
1563
1563
The implementation code for a stream should *never* call the "public" methods
1564
1564
of a stream that are intended for use by consumers (as described in the
@@ -1643,7 +1643,7 @@ const { Writable } = require('stream');
1643
1643
1644
1644
class MyWritable extends Writable {
1645
1645
constructor (options ) {
1646
- // Calls the stream.Writable() constructor
1646
+ // Calls the stream.Writable() constructor.
1647
1647
super (options);
1648
1648
// ...
1649
1649
}
@@ -1886,6 +1886,8 @@ changes:
1886
1886
* ` objectMode ` {boolean} Whether this stream should behave
1887
1887
as a stream of objects. Meaning that [ ` stream.read(n) ` ] [ stream-read ] returns
1888
1888
a single value instead of a ` Buffer ` of size ` n ` . ** Default:** ` false ` .
1889
+ * ` emitClose ` {boolean} Whether or not the stream should emit ` 'close' `
1890
+ after it has been destroyed. ** Default:** ` true ` .
1889
1891
* ` read ` {Function} Implementation for the [ ` stream._read() ` ] [ stream-_read ]
1890
1892
method.
1891
1893
* ` destroy ` {Function} Implementation for the
@@ -1899,7 +1901,7 @@ const { Readable } = require('stream');
1899
1901
1900
1902
class MyReadable extends Readable {
1901
1903
constructor (options ) {
1902
- // Calls the stream.Readable(options) constructor
1904
+ // Calls the stream.Readable(options) constructor.
1903
1905
super (options);
1904
1906
// ...
1905
1907
}
@@ -2026,18 +2028,18 @@ class SourceWrapper extends Readable {
2026
2028
2027
2029
// Every time there's data, push it into the internal buffer.
2028
2030
this ._source .ondata = (chunk ) => {
2029
- // If push() returns false, then stop reading from source
2031
+ // If push() returns false, then stop reading from source.
2030
2032
if (! this .push (chunk))
2031
2033
this ._source .readStop ();
2032
2034
};
2033
2035
2034
- // When the source ends, push the EOF-signaling `null` chunk
2036
+ // When the source ends, push the EOF-signaling `null` chunk.
2035
2037
this ._source .onend = () => {
2036
2038
this .push (null );
2037
2039
};
2038
2040
}
2039
- // _read will be called when the stream wants to pull more data in
2040
- // the advisory size argument is ignored in this case.
2041
+ // _read() will be called when the stream wants to pull more data in.
2042
+ // The advisory size argument is ignored in this case.
2041
2043
_read (size ) {
2042
2044
this ._source .readStart ();
2043
2045
}
@@ -2070,7 +2072,7 @@ const myReadable = new Readable({
2070
2072
process .nextTick (() => this .emit (' error' , err));
2071
2073
return ;
2072
2074
}
2073
- // do some work
2075
+ // Do some work.
2074
2076
}
2075
2077
});
2076
2078
```
@@ -2208,7 +2210,7 @@ class MyDuplex extends Duplex {
2208
2210
}
2209
2211
2210
2212
_write (chunk , encoding , callback ) {
2211
- // The underlying source only deals with strings
2213
+ // The underlying source only deals with strings.
2212
2214
if (Buffer .isBuffer (chunk))
2213
2215
chunk = chunk .toString ();
2214
2216
this [kSource].writeSomeData (chunk);
@@ -2241,12 +2243,12 @@ the `Readable` side.
2241
2243
``` js
2242
2244
const { Transform } = require (' stream' );
2243
2245
2244
- // All Transform streams are also Duplex Streams
2246
+ // All Transform streams are also Duplex Streams.
2245
2247
const myTransform = new Transform ({
2246
2248
writableObjectMode: true ,
2247
2249
2248
2250
transform (chunk , encoding , callback ) {
2249
- // Coerce the chunk to a number if necessary
2251
+ // Coerce the chunk to a number if necessary.
2250
2252
chunk |= 0 ;
2251
2253
2252
2254
// Transform the chunk into something else.
@@ -2385,7 +2387,7 @@ user programs.
2385
2387
[ ` stream.write() ` ] [ stream-write ] .
2386
2388
* ` encoding ` {string} If the chunk is a string, then this is the
2387
2389
encoding type. If chunk is a buffer, then this is the special
2388
- value - 'buffer', ignore it in this case.
2390
+ value - ` 'buffer' ` , ignore it in this case.
2389
2391
* ` callback ` {Function} A callback function (optionally with an error
2390
2392
argument and data) to be called after the supplied ` chunk ` has been
2391
2393
processed.
@@ -2493,12 +2495,12 @@ const writeable = fs.createWriteStream('./file');
2493
2495
2494
2496
(async function () {
2495
2497
for await (const chunk of iterator ) {
2496
- // Handle backpressure on write
2498
+ // Handle backpressure on write().
2497
2499
if (! writeable .write (chunk))
2498
2500
await once (writeable, ' drain' );
2499
2501
}
2500
2502
writeable .end ();
2501
- // Ensure completion without errors
2503
+ // Ensure completion without errors.
2502
2504
await once (writeable, ' finish' );
2503
2505
})();
2504
2506
```
@@ -2517,7 +2519,7 @@ const writeable = fs.createWriteStream('./file');
2517
2519
(async function () {
2518
2520
const readable = Readable .from (iterator);
2519
2521
readable .pipe (writeable);
2520
- // Ensure completion without errors
2522
+ // Ensure completion without errors.
2521
2523
await once (writeable, ' finish' );
2522
2524
})();
2523
2525
```
@@ -2560,7 +2562,7 @@ For example, consider the following code:
2560
2562
// WARNING! BROKEN!
2561
2563
net .createServer ((socket ) => {
2562
2564
2563
- // We add an 'end' listener, but never consume the data
2565
+ // We add an 'end' listener, but never consume the data.
2564
2566
socket .on (' end' , () => {
2565
2567
// It will never get here.
2566
2568
socket .end (' The message was received but was not processed.\n ' );
@@ -2576,7 +2578,7 @@ The workaround in this situation is to call the
2576
2578
[ ` stream.resume() ` ] [ stream-resume ] method to begin the flow of data:
2577
2579
2578
2580
``` js
2579
- // Workaround
2581
+ // Workaround.
2580
2582
net .createServer ((socket ) => {
2581
2583
socket .on (' end' , () => {
2582
2584
socket .end (' The message was received but was not processed.\n ' );
0 commit comments