From 5e5b1f8b891c5cd5465b03aa9bc8cd45f3e96592 Mon Sep 17 00:00:00 2001 From: Trevor Norris Date: Wed, 14 Dec 2016 17:17:10 -0700 Subject: [PATCH 001/144] src: return early if nextTickQueue is empty This brings the node::MakeCallback and node::AsyncWrap::MakeCallback implementations into alignment in that they return early if the nextTickQueue is empty after processing the MicrotaskQueue. Include test to make sure early return happens. Test has text explaining the conditions for the test to pass, since it relies on internal mechanisms that aren't guaranteed in the future. PR-URL: https://github.com/nodejs/node/pull/10274 Reviewed-By: Ben Noordhuis Reviewed-By: Anna Henningsen Reviewed-By: Jeremiah Senkpiel Reviewed-By: Colin Ihrig --- src/node.cc | 1 + test/parallel/test-no-enter-tickcallback.js | 32 +++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 test/parallel/test-no-enter-tickcallback.js diff --git a/src/node.cc b/src/node.cc index bdae47ee6c8211..bd8248b2e3c9b4 100644 --- a/src/node.cc +++ b/src/node.cc @@ -1274,6 +1274,7 @@ Local MakeCallback(Environment* env, if (tick_info->length() == 0) { tick_info->set_index(0); + return ret; } if (env->tick_callback_function()->Call(process, 0, nullptr).IsEmpty()) { diff --git a/test/parallel/test-no-enter-tickcallback.js b/test/parallel/test-no-enter-tickcallback.js new file mode 100644 index 00000000000000..e06628628955be --- /dev/null +++ b/test/parallel/test-no-enter-tickcallback.js @@ -0,0 +1,32 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +var allsGood = false; +var cntr = 0; + +process.on('exit', () => { + assert.ok(cntr > 0, '_tickDomainCallback was never called'); +}); + +/** + * This test relies upon the following internals to work as specified: + * - require('domain') causes node::Environment::set_tick_callback_function() + * to use process._tickDomainCallback() to process the nextTickQueue; + * replacing process._tickCallback(). + * - setImmediate() uses node::MakeCallback() instead of + * node::AsyncWrap::MakeCallback(). Otherwise the test will always pass. + * Have not found a way to verify that node::MakeCallback() is used. + */ +process._tickDomainCallback = function _tickDomainCallback() { + assert.ok(allsGood, '_tickDomainCallback should not have been called'); + cntr++; +}; + +setImmediate(common.mustCall(() => { + require('domain'); + setImmediate(common.mustCall(() => setImmediate(common.mustCall(() => { + allsGood = true; + process.nextTick(() => {}); + })))); +})); From d1843ec3a79030157dd205331947c66199ba30fc Mon Sep 17 00:00:00 2001 From: Trevor Norris Date: Wed, 21 Dec 2016 16:05:37 -0700 Subject: [PATCH 002/144] async_wrap: clear destroy_ids vector After processing all the callbacks in the destroy_ids vector make sure to clear() it otherwise the DestroyIdsCb() won't run again. 
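For illustration only, the shape of this fix can be sketched outside of Node's internals. The sketch below uses a plain std::vector and std::function instead of Environment::destroy_ids_list() and the real destroy() hook, so every name in it is illustrative rather than Node's API: the pending ids are swapped into a local vector so the iteration is undisturbed if a callback schedules more work, and the member is cleared afterwards so the queue ends up empty again, which is the property the commit message relies on.

    // Standalone sketch of the drain pattern; not Node source code.
    #include <cstdio>
    #include <functional>
    #include <vector>

    struct DestroyQueue {
      std::vector<double> pending_ids;          // stand-in for destroy_ids_list()
      std::function<void(double)> destroy_cb;   // stand-in for the destroy() hook

      void Drain() {
        std::vector<double> ids;
        ids.swap(pending_ids);          // iterate over a stable local copy
        for (double id : ids)
          destroy_cb(id);               // callbacks may append new ids
        pending_ids.clear();            // as in the patch, leave the member empty
      }
    };

    int main() {
      DestroyQueue q;
      q.destroy_cb = [](double id) { std::printf("destroy(%g)\n", id); };
      q.pending_ids = {1.0, 2.0, 3.0};
      q.Drain();
      return q.pending_ids.empty() ? 0 : 1;
    }

The change to src/async-wrap.cc below applies the same shape to env->destroy_ids_list().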
PR-URL: https://github.com/nodejs/node/pull/10400 Fixes: b49b496 "async_wrap: call destroy() callback in uv_idle_t" Reviewed-By: Ben Noordhuis Reviewed-By: Anna Henningsen --- src/async-wrap.cc | 6 +++++- test/parallel/test-async-wrap-uid.js | 14 +++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/src/async-wrap.cc b/src/async-wrap.cc index 42463bd22b31f4..a0780566db72d8 100644 --- a/src/async-wrap.cc +++ b/src/async-wrap.cc @@ -199,7 +199,9 @@ void AsyncWrap::DestroyIdsCb(uv_idle_t* handle) { TryCatch try_catch(env->isolate()); - for (auto current_id : *env->destroy_ids_list()) { + std::vector destroy_ids_list; + destroy_ids_list.swap(*env->destroy_ids_list()); + for (auto current_id : destroy_ids_list) { // Want each callback to be cleaned up after itself, instead of cleaning // them all up after the while() loop completes. HandleScope scope(env->isolate()); @@ -212,6 +214,8 @@ void AsyncWrap::DestroyIdsCb(uv_idle_t* handle) { FatalException(env->isolate(), try_catch); } } + + env->destroy_ids_list()->clear(); } diff --git a/test/parallel/test-async-wrap-uid.js b/test/parallel/test-async-wrap-uid.js index 3497c3b0768ddd..19cb01cfa83c7a 100644 --- a/test/parallel/test-async-wrap-uid.js +++ b/test/parallel/test-async-wrap-uid.js @@ -5,8 +5,14 @@ const fs = require('fs'); const assert = require('assert'); const async_wrap = process.binding('async_wrap'); +// Give the event loop time to clear out the final uv_close(). +var si_cntr = 3; +process.on('beforeExit', () => { + if (--si_cntr > 0) setImmediate(() => {}); +}); + const storage = new Map(); -async_wrap.setupHooks({ init, pre, post }); +async_wrap.setupHooks({ init, pre, post, destroy }); async_wrap.enable(); function init(uid) { @@ -14,6 +20,7 @@ function init(uid) { init: true, pre: false, post: false, + destroy: false, }); } @@ -25,6 +32,10 @@ function post(uid) { storage.get(uid).post = true; } +function destroy(uid) { + storage.get(uid).destroy = true; +} + fs.access(__filename, function(err) { assert.ifError(err); }); @@ -46,6 +57,7 @@ process.once('exit', function() { init: true, pre: true, post: true, + destroy: true, }); } }); From abde7644a57cb642e02747b8ff8510a9baef1850 Mon Sep 17 00:00:00 2001 From: Anna Henningsen Date: Wed, 21 Dec 2016 08:27:34 +0100 Subject: [PATCH 003/144] fs: support Uint8Array input to methods MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Allow `fs.read`, `fs.write` and `fs.writeFile` to take `Uint8Array` arguments. PR-URL: https://github.com/nodejs/node/pull/10382 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Italo A. 
Casas --- doc/api/fs.md | 12 +++--- lib/fs.js | 13 ++++--- src/node_util.cc | 3 +- test/parallel/test-fs-read-buffer.js | 39 ++++++++++++------- test/parallel/test-fs-write-buffer.js | 20 ++++++++++ .../parallel/test-fs-write-file-uint8array.js | 28 +++++++++++++ 6 files changed, 87 insertions(+), 28 deletions(-) create mode 100644 test/parallel/test-fs-write-file-uint8array.js diff --git a/doc/api/fs.md b/doc/api/fs.md index 9eeeffa85785d2..4651e08672dda5 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1281,7 +1281,7 @@ added: v0.0.2 --> * `fd` {Integer} -* `buffer` {String | Buffer} +* `buffer` {String | Buffer | Uint8Array} * `offset` {Integer} * `length` {Integer} * `position` {Integer} @@ -1427,7 +1427,7 @@ added: v0.1.21 --> * `fd` {Integer} -* `buffer` {String | Buffer} +* `buffer` {String | Buffer | Uint8Array} * `offset` {Integer} * `length` {Integer} * `position` {Integer} @@ -1824,7 +1824,7 @@ added: v0.0.2 --> * `fd` {Integer} -* `buffer` {Buffer} +* `buffer` {Buffer | Uint8Array} * `offset` {Integer} * `length` {Integer} * `position` {Integer} @@ -1891,7 +1891,7 @@ added: v0.1.29 --> * `file` {String | Buffer | Integer} filename or file descriptor -* `data` {String | Buffer} +* `data` {String | Buffer | Uint8Array} * `options` {Object | String} * `encoding` {String | Null} default = `'utf8'` * `mode` {Integer} default = `0o666` @@ -1934,7 +1934,7 @@ added: v0.1.29 --> * `file` {String | Buffer | Integer} filename or file descriptor -* `data` {String | Buffer} +* `data` {String | Buffer | Uint8Array} * `options` {Object | String} * `encoding` {String | Null} default = `'utf8'` * `mode` {Integer} default = `0o666` @@ -1948,7 +1948,7 @@ added: v0.1.21 --> * `fd` {Integer} -* `buffer` {Buffer} +* `buffer` {Buffer | Uint8Array} * `offset` {Integer} * `length` {Integer} * `position` {Integer} diff --git a/lib/fs.js b/lib/fs.js index 52f392d749364c..32d78b7b187b1f 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -6,6 +6,7 @@ const constants = process.binding('constants').fs; const util = require('util'); const pathModule = require('path'); +const { isUint8Array } = process.binding('util'); const binding = process.binding('fs'); const fs = exports; @@ -559,7 +560,7 @@ fs.openSync = function(path, flags, mode) { var readWarned = false; fs.read = function(fd, buffer, offset, length, position, callback) { - if (!(buffer instanceof Buffer)) { + if (!isUint8Array(buffer)) { // legacy string interface (fd, length, position, encoding, callback) if (!readWarned) { readWarned = true; @@ -623,7 +624,7 @@ fs.readSync = function(fd, buffer, offset, length, position) { var legacy = false; var encoding; - if (!(buffer instanceof Buffer)) { + if (!isUint8Array(buffer)) { // legacy string interface (fd, length, position, encoding, callback) if (!readSyncWarned) { readSyncWarned = true; @@ -674,7 +675,7 @@ fs.write = function(fd, buffer, offset, length, position, callback) { var req = new FSReqWrap(); req.oncomplete = wrapper; - if (buffer instanceof Buffer) { + if (isUint8Array(buffer)) { callback = maybeCallback(callback || position || length || offset); if (typeof offset !== 'number') { offset = 0; @@ -708,7 +709,7 @@ fs.write = function(fd, buffer, offset, length, position, callback) { // OR // fs.writeSync(fd, string[, position[, encoding]]); fs.writeSync = function(fd, buffer, offset, length, position) { - if (buffer instanceof Buffer) { + if (isUint8Array(buffer)) { if (position === undefined) position = null; if (typeof offset !== 'number') @@ -1206,7 +1207,7 @@ fs.writeFile = function(path, data, 
options, callback) { }); function writeFd(fd, isUserFd) { - var buffer = (data instanceof Buffer) ? + var buffer = isUint8Array(data) ? data : Buffer.from('' + data, options.encoding || 'utf8'); var position = /a/.test(flag) ? null : 0; @@ -1221,7 +1222,7 @@ fs.writeFileSync = function(path, data, options) { var isUserFd = isFd(path); // file descriptor ownership var fd = isUserFd ? path : fs.openSync(path, flag, options.mode); - if (!(data instanceof Buffer)) { + if (!isUint8Array(data)) { data = Buffer.from('' + data, options.encoding || 'utf8'); } var offset = 0; diff --git a/src/node_util.cc b/src/node_util.cc index c231983e57a2df..a1387353e3d9a5 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -29,7 +29,8 @@ using v8::Value; V(isSet, IsSet) \ V(isSetIterator, IsSetIterator) \ V(isSharedArrayBuffer, IsSharedArrayBuffer) \ - V(isTypedArray, IsTypedArray) + V(isTypedArray, IsTypedArray) \ + V(isUint8Array, IsUint8Array) #define V(_, ucname) \ diff --git a/test/parallel/test-fs-read-buffer.js b/test/parallel/test-fs-read-buffer.js index 32f52b13cc7594..82fb3c284fb98a 100644 --- a/test/parallel/test-fs-read-buffer.js +++ b/test/parallel/test-fs-read-buffer.js @@ -6,20 +6,29 @@ const Buffer = require('buffer').Buffer; const fs = require('fs'); const filepath = path.join(common.fixturesDir, 'x.txt'); const fd = fs.openSync(filepath, 'r'); -const expected = 'xyz\n'; -const bufferAsync = Buffer.allocUnsafe(expected.length); -const bufferSync = Buffer.allocUnsafe(expected.length); -fs.read(fd, - bufferAsync, - 0, - expected.length, - 0, - common.mustCall(function(err, bytesRead) { - assert.equal(bytesRead, expected.length); - assert.deepStrictEqual(bufferAsync, Buffer.from(expected)); - })); +const expected = Buffer.from('xyz\n'); -var r = fs.readSync(fd, bufferSync, 0, expected.length, 0); -assert.deepStrictEqual(bufferSync, Buffer.from(expected)); -assert.equal(r, expected.length); +function test(bufferAsync, bufferSync, expected) { + fs.read(fd, + bufferAsync, + 0, + expected.length, + 0, + common.mustCall((err, bytesRead) => { + assert.strictEqual(bytesRead, expected.length); + assert.deepStrictEqual(bufferAsync, Buffer.from(expected)); + })); + + const r = fs.readSync(fd, bufferSync, 0, expected.length, 0); + assert.deepStrictEqual(bufferSync, Buffer.from(expected)); + assert.strictEqual(r, expected.length); +} + +test(Buffer.allocUnsafe(expected.length), + Buffer.allocUnsafe(expected.length), + expected); + +test(new Uint8Array(expected.length), + new Uint8Array(expected.length), + Uint8Array.from(expected)); diff --git a/test/parallel/test-fs-write-buffer.js b/test/parallel/test-fs-write-buffer.js index ed77d697b33bd2..0e24f33872825f 100644 --- a/test/parallel/test-fs-write-buffer.js +++ b/test/parallel/test-fs-write-buffer.js @@ -106,3 +106,23 @@ common.refreshTmpDir(); fs.write(fd, expected, undefined, undefined, cb); })); } + +// fs.write with a Uint8Array, without the offset and length parameters: +{ + const filename = path.join(common.tmpDir, 'write6.txt'); + fs.open(filename, 'w', 0o644, common.mustCall(function(err, fd) { + assert.ifError(err); + + const cb = common.mustCall(function(err, written) { + assert.ifError(err); + + assert.strictEqual(expected.length, written); + fs.closeSync(fd); + + const found = fs.readFileSync(filename, 'utf8'); + assert.deepStrictEqual(expected.toString(), found); + }); + + fs.write(fd, Uint8Array.from(expected), cb); + })); +} diff --git a/test/parallel/test-fs-write-file-uint8array.js b/test/parallel/test-fs-write-file-uint8array.js new 
file mode 100644 index 00000000000000..219379c77a920d --- /dev/null +++ b/test/parallel/test-fs-write-file-uint8array.js @@ -0,0 +1,28 @@ +'use strict'; +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const join = require('path').join; + +common.refreshTmpDir(); + +const filename = join(common.tmpDir, 'test.txt'); + +const s = '南越国是前203年至前111年存在于岭南地区的一个国家,国都位于番禺,疆域包括今天中国的广东、' + + '广西两省区的大部份地区,福建省、湖南、贵州、云南的一小部份地区和越南的北部。' + + '南越国是秦朝灭亡后,由南海郡尉赵佗于前203年起兵兼并桂林郡和象郡后建立。' + + '前196年和前179年,南越国曾先后两次名义上臣属于西汉,成为西汉的“外臣”。前112年,' + + '南越国末代君主赵建德与西汉发生战争,被汉武帝于前111年所灭。南越国共存在93年,' + + '历经五代君主。南越国是岭南地区的第一个有记载的政权国家,采用封建制和郡县制并存的制度,' + + '它的建立保证了秦末乱世岭南地区社会秩序的稳定,有效的改善了岭南地区落后的政治、##济现状。\n'; + +const input = Uint8Array.from(Buffer.from(s, 'utf8')); + +fs.writeFileSync(filename, input); +assert.strictEqual(fs.readFileSync(filename, 'utf8'), s); + +fs.writeFile(filename, input, common.mustCall((e) => { + assert.ifError(e); + + assert.strictEqual(fs.readFileSync(filename, 'utf8'), s); +})); From 023956187e124172707761a238201ea2dc8c6b9b Mon Sep 17 00:00:00 2001 From: Eugene Ostroukhov Date: Wed, 9 Nov 2016 14:19:35 -0800 Subject: [PATCH 004/144] inspector: split HTTP/WS server from the inspector Both our team experiments and some embedder request indicate a potential in implementing alternative transport for inspector - e.g. IPC pipes or custom embedder APIs. This change moves all HTTP specific code into a separate class and is a first attempt at defining a boundary between the inspector agent and transport. This API will be refined as new transports are implemented. Note that even without considering alternative transports, this change enables better testing of the HTTP server (Valgrind made it possible to identify and fix some existing memory leaks). PR-URL: https://github.com/nodejs/node/pull/9630 Reviewed-By: James M Snell Reviewed-By: Ben Noordhuis --- node.gyp | 21 +- src/inspector_agent.cc | 424 +++++----------- src/inspector_agent.h | 3 +- src/inspector_socket_server.cc | 471 ++++++++++++++++++ src/inspector_socket_server.h | 77 +++ test/cctest/test_inspector_socket_server.cc | 517 ++++++++++++++++++++ 6 files changed, 1212 insertions(+), 301 deletions(-) create mode 100644 src/inspector_socket_server.cc create mode 100644 src/inspector_socket_server.h create mode 100644 test/cctest/test_inspector_socket_server.cc diff --git a/node.gyp b/node.gyp index f59037737c905a..e5f02d73086a09 100644 --- a/node.gyp +++ b/node.gyp @@ -318,8 +318,10 @@ 'sources': [ 'src/inspector_agent.cc', 'src/inspector_socket.cc', - 'src/inspector_socket.h', + 'src/inspector_socket_server.cc', 'src/inspector_agent.h', + 'src/inspector_socket.h', + 'src/inspector_socket_server.h', ], 'dependencies': [ 'deps/v8_inspector/src/inspector/inspector.gyp:standalone_inspector', @@ -868,7 +870,8 @@ 'dependencies': [ 'deps/gtest/gtest.gyp:gtest' ], 'include_dirs': [ 'src', - 'deps/v8/include' + 'deps/v8/include', + '<(SHARED_INTERMEDIATE_DIR)' ], 'defines': [ # gtest's ASSERT macros conflict with our own. 
@@ -886,9 +889,21 @@ 'conditions': [ ['v8_inspector=="true"', { + 'defines': [ + 'HAVE_INSPECTOR=1', + ], + 'dependencies': [ + 'deps/zlib/zlib.gyp:zlib', + 'v8_inspector_compress_protocol_json#host' + ], + 'include_dirs': [ + '<(SHARED_INTERMEDIATE_DIR)' + ], 'sources': [ 'src/inspector_socket.cc', - 'test/cctest/test_inspector_socket.cc' + 'src/inspector_socket_server.cc', + 'test/cctest/test_inspector_socket.cc', + 'test/cctest/test_inspector_socket_server.cc' ], 'conditions': [ [ 'node_shared_openssl=="false"', { diff --git a/src/inspector_agent.cc b/src/inspector_agent.cc index ec713942f50e7e..fc478c49a09d61 100644 --- a/src/inspector_agent.cc +++ b/src/inspector_agent.cc @@ -1,6 +1,6 @@ #include "inspector_agent.h" -#include "inspector_socket.h" +#include "inspector_socket_server.h" #include "env.h" #include "env-inl.h" #include "node.h" @@ -37,84 +37,6 @@ static const uint8_t PROTOCOL_JSON[] = { #include "v8_inspector_protocol_json.h" // NOLINT(build/include_order) }; -std::string GetWsUrl(int port, const std::string& id) { - char buf[1024]; - snprintf(buf, sizeof(buf), "127.0.0.1:%d/%s", port, id.c_str()); - return buf; -} - -void PrintDebuggerReadyMessage(int port, const std::string& id) { - fprintf(stderr, "Debugger listening on port %d.\n" - "Warning: This is an experimental feature and could change at any time.\n" - "To start debugging, open the following URL in Chrome:\n" - " chrome-devtools://devtools/bundled/inspector.html?" - "experiments=true&v8only=true&ws=%s\n", - port, GetWsUrl(port, id).c_str()); - fflush(stderr); -} - -std::string MapToString(const std::map object) { - std::ostringstream json; - json << "[ {\n"; - bool first = true; - for (const auto& name_value : object) { - if (!first) - json << ",\n"; - json << " \"" << name_value.first << "\": \""; - json << name_value.second << "\""; - first = false; - } - json << "\n} ]\n\n"; - return json.str(); -} - -void Escape(std::string* string) { - for (char& c : *string) { - c = (c == '\"' || c == '\\') ? '_' : c; - } -} - -void DisposeInspector(InspectorSocket* socket, int status) { - delete socket; -} - -void DisconnectAndDisposeIO(InspectorSocket* socket) { - if (socket) { - inspector_close(socket, DisposeInspector); - } -} - -void OnBufferAlloc(uv_handle_t* handle, size_t len, uv_buf_t* buf) { - buf->base = new char[len]; - buf->len = len; -} - -void SendHttpResponse(InspectorSocket* socket, const char* response, - size_t size) { - const char HEADERS[] = "HTTP/1.0 200 OK\r\n" - "Content-Type: application/json; charset=UTF-8\r\n" - "Cache-Control: no-cache\r\n" - "Content-Length: %zu\r\n" - "\r\n"; - char header[sizeof(HEADERS) + 20]; - int header_len = snprintf(header, sizeof(header), HEADERS, size); - inspector_write(socket, header, header_len); - inspector_write(socket, response, size); -} - -void SendHttpResponse(InspectorSocket* socket, const std::string& response) { - SendHttpResponse(socket, response.data(), response.size()); -} - -void SendVersionResponse(InspectorSocket* socket) { - static const char response[] = - "{\n" - " \"Browser\": \"node.js/" NODE_VERSION "\",\n" - " \"Protocol-Version\": \"1.1\"\n" - "}\n"; - SendHttpResponse(socket, response, sizeof(response) - 1); -} - std::string GetProcessTitle() { // uv_get_process_title will trim the title if it is too long. 
char title[2048]; @@ -126,36 +48,6 @@ std::string GetProcessTitle() { } } -void SendProtocolJson(InspectorSocket* socket) { - z_stream strm; - strm.zalloc = Z_NULL; - strm.zfree = Z_NULL; - strm.opaque = Z_NULL; - CHECK_EQ(Z_OK, inflateInit(&strm)); - static const size_t kDecompressedSize = - PROTOCOL_JSON[0] * 0x10000u + - PROTOCOL_JSON[1] * 0x100u + - PROTOCOL_JSON[2]; - strm.next_in = const_cast(PROTOCOL_JSON + 3); - strm.avail_in = sizeof(PROTOCOL_JSON) - 3; - std::string data(kDecompressedSize, '\0'); - strm.next_out = reinterpret_cast(&data[0]); - strm.avail_out = data.size(); - CHECK_EQ(Z_STREAM_END, inflate(&strm, Z_FINISH)); - CHECK_EQ(0, strm.avail_out); - CHECK_EQ(Z_OK, inflateEnd(&strm)); - SendHttpResponse(socket, data); -} - -const char* match_path_segment(const char* path, const char* expected) { - size_t len = strlen(expected); - if (StringEqualNoCaseN(path, expected, len)) { - if (path[len] == '/') return path + len + 1; - if (path[len] == '\0') return path + len; - } - return nullptr; -} - // UUID RFC: https://www.ietf.org/rfc/rfc4122.txt // Used ver 4 - with numbers std::string GenerateID() { @@ -201,17 +93,39 @@ std::string StringViewToUtf8(const StringView& view) { return result; } -std::unique_ptr Utf8ToStringView(const char* source, - size_t length) { - UnicodeString utf16 = UnicodeString::fromUTF8(StringPiece(source, length)); +std::unique_ptr Utf8ToStringView(const std::string& message) { + UnicodeString utf16 = + UnicodeString::fromUTF8(StringPiece(message.data(), message.length())); StringView view(reinterpret_cast(utf16.getBuffer()), utf16.length()); return StringBuffer::create(view); } + } // namespace class V8NodeInspector; +class InspectorAgentDelegate: public node::inspector::SocketServerDelegate { + public: + InspectorAgentDelegate(AgentImpl* agent, const std::string& script_path, + const std::string& script_name, bool wait); + bool StartSession(int session_id, const std::string& target_id) override; + void MessageReceived(int session_id, const std::string& message) override; + void EndSession(int session_id) override; + std::vector GetTargetIds() override; + std::string GetTargetTitle(const std::string& id) override; + std::string GetTargetUrl(const std::string& id) override; + bool IsConnected() { return connected_; } + private: + AgentImpl* agent_; + bool connected_; + int session_id_; + const std::string script_name_; + const std::string script_path_; + const std::string target_id_; + bool waiting_; +}; + class AgentImpl { public: explicit AgentImpl(node::Environment* env); @@ -223,42 +137,37 @@ class AgentImpl { void Stop(); bool IsStarted(); - bool IsConnected() { return state_ == State::kConnected; } + bool IsConnected(); void WaitForDisconnect(); void FatalException(v8::Local error, v8::Local message); + void PostIncomingMessage(int session_id, const std::string& message); + void ResumeStartup() { + uv_sem_post(&start_sem_); + } + private: using MessageQueue = std::vector>>; enum class State { kNew, kAccepting, kConnected, kDone, kError }; static void ThreadCbIO(void* agent); - static void OnSocketConnectionIO(uv_stream_t* server, int status); - static bool OnInspectorHandshakeIO(InspectorSocket* socket, - enum inspector_handshake_event state, - const std::string& path); static void WriteCbIO(uv_async_t* async); void InstallInspectorOnProcess(); void WorkerRunIO(); - void OnInspectorConnectionIO(InspectorSocket* socket); - void OnRemoteDataIO(InspectorSocket* stream, ssize_t read, - const uv_buf_t* b); void SetConnected(bool connected); void 
DispatchMessages(); void Write(int session_id, const StringView& message); bool AppendMessage(MessageQueue* vector, int session_id, std::unique_ptr buffer); void SwapBehindLock(MessageQueue* vector1, MessageQueue* vector2); - void PostIncomingMessage(const char* message, size_t len); void WaitForFrontendMessage(); void NotifyMessageReceived(); State ToState(State state); - void SendListResponse(InspectorSocket* socket); - bool RespondToGet(InspectorSocket* socket, const std::string& path); uv_sem_t start_sem_; ConditionVariable incoming_message_cond_; @@ -266,6 +175,8 @@ class AgentImpl { uv_thread_t thread_; uv_loop_t child_loop_; + InspectorAgentDelegate* delegate_; + int port_; bool wait_; bool shutting_down_; @@ -274,18 +185,15 @@ class AgentImpl { uv_async_t* data_written_; uv_async_t io_thread_req_; - InspectorSocket* client_socket_; V8NodeInspector* inspector_; v8::Platform* platform_; MessageQueue incoming_message_queue_; MessageQueue outgoing_message_queue_; bool dispatching_messages_; - int frontend_session_id_; - int backend_session_id_; + int session_id_; + InspectorSocketServer* server_; std::string script_name_; - std::string script_path_; - const std::string id_; friend class ChannelImpl; friend class DispatchOnInspectorBackendTask; @@ -300,11 +208,6 @@ void InterruptCallback(v8::Isolate*, void* agent) { static_cast(agent)->DispatchMessages(); } -void DataCallback(uv_stream_t* stream, ssize_t read, const uv_buf_t* buf) { - InspectorSocket* socket = inspector_from_stream(stream); - static_cast(socket->data)->OnRemoteDataIO(socket, read, buf); -} - class DispatchOnInspectorBackendTask : public v8::Task { public: explicit DispatchOnInspectorBackendTask(AgentImpl* agent) : agent_(agent) {} @@ -333,7 +236,7 @@ class ChannelImpl final : public v8_inspector::V8Inspector::Channel { void flushProtocolNotifications() override { } void sendMessageToFrontend(const StringView& message) { - agent_->Write(agent_->frontend_session_id_, message); + agent_->Write(agent_->session_id_, message); } AgentImpl* const agent_; @@ -414,19 +317,18 @@ class V8NodeInspector : public v8_inspector::V8InspectorClient { std::unique_ptr session_; }; -AgentImpl::AgentImpl(Environment* env) : port_(0), +AgentImpl::AgentImpl(Environment* env) : delegate_(nullptr), + port_(0), wait_(false), shutting_down_(false), state_(State::kNew), parent_env_(env), data_written_(new uv_async_t()), - client_socket_(nullptr), inspector_(nullptr), platform_(nullptr), dispatching_messages_(false), - frontend_session_id_(0), - backend_session_id_(0), - id_(GenerateID()) { + session_id_(0), + server_(nullptr) { CHECK_EQ(0, uv_sem_init(&start_sem_, 0)); memset(&io_thread_req_, 0, sizeof(io_thread_req_)); CHECK_EQ(0, uv_async_init(env->event_loop(), data_written_, nullptr)); @@ -543,6 +445,10 @@ void AgentImpl::Stop() { delete inspector_; } +bool AgentImpl::IsConnected() { + return delegate_ != nullptr && delegate_->IsConnected(); +} + bool AgentImpl::IsStarted() { return !!platform_; } @@ -550,6 +456,9 @@ bool AgentImpl::IsStarted() { void AgentImpl::WaitForDisconnect() { if (state_ == State::kConnected) { shutting_down_ = true; + // Gives a signal to stop accepting new connections + // TODO(eugeneo): Introduce an API with explicit request names. 
+ Write(0, StringView()); fprintf(stderr, "Waiting for the debugger to disconnect...\n"); fflush(stderr); inspector_->runMessageLoopOnPause(0); @@ -621,181 +530,59 @@ void AgentImpl::ThreadCbIO(void* agent) { static_cast(agent)->WorkerRunIO(); } -// static -void AgentImpl::OnSocketConnectionIO(uv_stream_t* server, int status) { - if (status == 0) { - InspectorSocket* socket = new InspectorSocket(); - socket->data = server->data; - if (inspector_accept(server, socket, - AgentImpl::OnInspectorHandshakeIO) != 0) { - delete socket; - } - } -} - -// static -bool AgentImpl::OnInspectorHandshakeIO(InspectorSocket* socket, - enum inspector_handshake_event state, - const std::string& path) { - AgentImpl* agent = static_cast(socket->data); - switch (state) { - case kInspectorHandshakeHttpGet: - return agent->RespondToGet(socket, path); - case kInspectorHandshakeUpgrading: - return path.length() == agent->id_.length() + 1 && - path.find(agent->id_) == 1; - case kInspectorHandshakeUpgraded: - agent->OnInspectorConnectionIO(socket); - return true; - case kInspectorHandshakeFailed: - delete socket; - return false; - default: - UNREACHABLE(); - return false; - } -} - -void AgentImpl::OnRemoteDataIO(InspectorSocket* socket, - ssize_t read, - const uv_buf_t* buf) { - if (read > 0) { - // TODO(pfeldman): Instead of blocking execution while debugger - // engages, node should wait for the run callback from the remote client - // and initiate its startup. This is a change to node.cc that should be - // upstreamed separately. - if (wait_) { - std::string message(buf->base, read); - if (message.find("\"Runtime.runIfWaitingForDebugger\"") != - std::string::npos) { - wait_ = false; - uv_sem_post(&start_sem_); - } - } - PostIncomingMessage(buf->base, read); - } else { - // EOF - if (client_socket_ == socket) { - client_socket_ = nullptr; - PostIncomingMessage(TAG_DISCONNECT, sizeof(TAG_DISCONNECT) - 1); - } - DisconnectAndDisposeIO(socket); - } - if (buf) { - delete[] buf->base; - } -} - -void AgentImpl::SendListResponse(InspectorSocket* socket) { - std::map response; - response["description"] = "node.js instance"; - response["faviconUrl"] = "https://nodejs.org/static/favicon.ico"; - response["id"] = id_; - response["title"] = script_name_.empty() ? GetProcessTitle() : script_name_; - Escape(&response["title"]); - response["type"] = "node"; - // This attribute value is a "best effort" URL that is passed as a JSON - // string. It is not guaranteed to resolve to a valid resource. 
- response["url"] = "file://" + script_path_; - Escape(&response["url"]); - - if (!client_socket_) { - std::string address = GetWsUrl(port_, id_); - - std::ostringstream frontend_url; - frontend_url << "chrome-devtools://devtools/bundled"; - frontend_url << "/inspector.html?experiments=true&v8only=true&ws="; - frontend_url << address; - - response["devtoolsFrontendUrl"] += frontend_url.str(); - response["webSocketDebuggerUrl"] = "ws://" + address; - } - SendHttpResponse(socket, MapToString(response)); -} - -bool AgentImpl::RespondToGet(InspectorSocket* socket, const std::string& path) { - const char* command = match_path_segment(path.c_str(), "/json"); - if (command == nullptr) - return false; - - if (match_path_segment(command, "list") || command[0] == '\0') { - SendListResponse(socket); - return true; - } else if (match_path_segment(command, "protocol")) { - SendProtocolJson(socket); - return true; - } else if (match_path_segment(command, "version")) { - SendVersionResponse(socket); - return true; - } else if (const char* pid = match_path_segment(command, "activate")) { - if (pid != id_) - return false; - SendHttpResponse(socket, "Target activated"); - return true; - } - return false; -} - // static void AgentImpl::WriteCbIO(uv_async_t* async) { AgentImpl* agent = static_cast(async->data); - InspectorSocket* socket = agent->client_socket_; - if (socket) { - MessageQueue outgoing_messages; - agent->SwapBehindLock(&agent->outgoing_message_queue_, &outgoing_messages); - for (const MessageQueue::value_type& outgoing : outgoing_messages) { - if (outgoing.first == agent->frontend_session_id_) { - StringView message = outgoing.second->string(); - std::string utf8Message = StringViewToUtf8(message); - inspector_write(socket, utf8Message.c_str(), utf8Message.length()); - } + MessageQueue outgoing_messages; + agent->SwapBehindLock(&agent->outgoing_message_queue_, &outgoing_messages); + for (const MessageQueue::value_type& outgoing : outgoing_messages) { + StringView view = outgoing.second->string(); + if (view.length() == 0) { + agent->server_->Stop(nullptr); + } else { + agent->server_->Send(outgoing.first, + StringViewToUtf8(outgoing.second->string())); } } } void AgentImpl::WorkerRunIO() { - sockaddr_in addr; - uv_tcp_t server; int err = uv_loop_init(&child_loop_); CHECK_EQ(err, 0); err = uv_async_init(&child_loop_, &io_thread_req_, AgentImpl::WriteCbIO); CHECK_EQ(err, 0); io_thread_req_.data = this; + std::string script_path; if (!script_name_.empty()) { uv_fs_t req; if (0 == uv_fs_realpath(&child_loop_, &req, script_name_.c_str(), nullptr)) - script_path_ = std::string(reinterpret_cast(req.ptr)); + script_path = std::string(reinterpret_cast(req.ptr)); uv_fs_req_cleanup(&req); } - uv_tcp_init(&child_loop_, &server); - uv_ip4_addr("0.0.0.0", port_, &addr); - server.data = this; - err = uv_tcp_bind(&server, - reinterpret_cast(&addr), 0); - if (err == 0) { - err = uv_listen(reinterpret_cast(&server), 1, - OnSocketConnectionIO); - } - if (err != 0) { + InspectorAgentDelegate delegate(this, script_path, script_name_, wait_); + delegate_ = &delegate; + InspectorSocketServer server(&delegate, port_); + if (!server.Start(&child_loop_)) { fprintf(stderr, "Unable to open devtools socket: %s\n", uv_strerror(err)); state_ = State::kError; // Safe, main thread is waiting on semaphore uv_close(reinterpret_cast(&io_thread_req_), nullptr); - uv_close(reinterpret_cast(&server), nullptr); uv_loop_close(&child_loop_); uv_sem_post(&start_sem_); return; } - PrintDebuggerReadyMessage(port_, id_); + server_ = 
&server; if (!wait_) { uv_sem_post(&start_sem_); } uv_run(&child_loop_, UV_RUN_DEFAULT); uv_close(reinterpret_cast(&io_thread_req_), nullptr); - uv_close(reinterpret_cast(&server), nullptr); - DisconnectAndDisposeIO(client_socket_); + server.Stop(nullptr); + server.TerminateConnections(nullptr); uv_run(&child_loop_, UV_RUN_NOWAIT); err = uv_loop_close(&child_loop_); CHECK_EQ(err, 0); + delegate_ = nullptr; + server_ = nullptr; } bool AgentImpl::AppendMessage(MessageQueue* queue, int session_id, @@ -811,9 +598,10 @@ void AgentImpl::SwapBehindLock(MessageQueue* vector1, MessageQueue* vector2) { vector1->swap(*vector2); } -void AgentImpl::PostIncomingMessage(const char* message, size_t len) { - if (AppendMessage(&incoming_message_queue_, frontend_session_id_, - Utf8ToStringView(message, len))) { +void AgentImpl::PostIncomingMessage(int session_id, + const std::string& message) { + if (AppendMessage(&incoming_message_queue_, session_id, + Utf8ToStringView(message))) { v8::Isolate* isolate = parent_env_->isolate(); platform_->CallOnForegroundThread(isolate, new DispatchOnInspectorBackendTask(this)); @@ -834,17 +622,6 @@ void AgentImpl::NotifyMessageReceived() { incoming_message_cond_.Broadcast(scoped_lock); } -void AgentImpl::OnInspectorConnectionIO(InspectorSocket* socket) { - if (client_socket_) { - DisconnectAndDisposeIO(socket); - return; - } - client_socket_ = socket; - inspector_read_start(socket, OnBufferAlloc, DataCallback); - frontend_session_id_++; - PostIncomingMessage(TAG_CONNECT, sizeof(TAG_CONNECT) - 1); -} - void AgentImpl::DispatchMessages() { // This function can be reentered if there was an incoming message while // V8 was processing another inspector request (e.g. if the user is @@ -867,7 +644,7 @@ void AgentImpl::DispatchMessages() { if (tag == TAG_CONNECT) { CHECK_EQ(State::kAccepting, state_); - backend_session_id_++; + session_id_ = pair.first; state_ = State::kConnected; fprintf(stderr, "Debugger attached.\n"); inspector_->connectFrontend(); @@ -876,7 +653,6 @@ void AgentImpl::DispatchMessages() { if (shutting_down_) { state_ = State::kDone; } else { - PrintDebuggerReadyMessage(port_, id_); state_ = State::kAccepting; } inspector_->quitMessageLoopOnPause(); @@ -930,6 +706,60 @@ void Agent::FatalException(v8::Local error, impl->FatalException(error, message); } +InspectorAgentDelegate::InspectorAgentDelegate(AgentImpl* agent, + const std::string& script_path, + const std::string& script_name, + bool wait) + : agent_(agent), + connected_(false), + session_id_(0), + script_name_(script_name), + script_path_(script_path), + target_id_(GenerateID()), + waiting_(wait) { } + + +bool InspectorAgentDelegate::StartSession(int session_id, + const std::string& target_id) { + if (connected_) + return false; + connected_ = true; + agent_->PostIncomingMessage(session_id, TAG_CONNECT); + return true; +} + +void InspectorAgentDelegate::MessageReceived(int session_id, + const std::string& message) { + // TODO(pfeldman): Instead of blocking execution while debugger + // engages, node should wait for the run callback from the remote client + // and initiate its startup. This is a change to node.cc that should be + // upstreamed separately. 
+ if (waiting_) { + if (message.find("\"Runtime.runIfWaitingForDebugger\"") != + std::string::npos) { + waiting_ = false; + agent_->ResumeStartup(); + } + } + agent_->PostIncomingMessage(session_id, message); +} + +void InspectorAgentDelegate::EndSession(int session_id) { + connected_ = false; + agent_->PostIncomingMessage(session_id, TAG_DISCONNECT); +} + +std::vector InspectorAgentDelegate::GetTargetIds() { + return { target_id_ }; +} + +std::string InspectorAgentDelegate::GetTargetTitle(const std::string& id) { + return script_name_.empty() ? GetProcessTitle() : script_name_; +} + +std::string InspectorAgentDelegate::GetTargetUrl(const std::string& id) { + return "file://" + script_path_; +} } // namespace inspector } // namespace node diff --git a/src/inspector_agent.h b/src/inspector_agent.h index 3607cffba5d21f..b31c77496b3d70 100644 --- a/src/inspector_agent.h +++ b/src/inspector_agent.h @@ -1,6 +1,8 @@ #ifndef SRC_INSPECTOR_AGENT_H_ #define SRC_INSPECTOR_AGENT_H_ +#include + #if !HAVE_INSPECTOR #error("This header can only be used when inspector is enabled") #endif @@ -36,7 +38,6 @@ class Agent { bool IsStarted(); bool IsConnected(); void WaitForDisconnect(); - void FatalException(v8::Local error, v8::Local message); private: diff --git a/src/inspector_socket_server.cc b/src/inspector_socket_server.cc new file mode 100644 index 00000000000000..e05a0c577da7b1 --- /dev/null +++ b/src/inspector_socket_server.cc @@ -0,0 +1,471 @@ +#include "inspector_socket_server.h" + +#include "node.h" +#include "uv.h" +#include "zlib.h" + +#include +#include +#include +#include + +namespace node { +namespace inspector { + +namespace { + +static const uint8_t PROTOCOL_JSON[] = { + #include "v8_inspector_protocol_json.h" // NOLINT(build/include_order) +}; + +void Escape(std::string* string) { + for (char& c : *string) { + c = (c == '\"' || c == '\\') ? 
'_' : c; + } +} + +std::string GetWsUrl(int port, const std::string& id) { + char buf[1024]; + snprintf(buf, sizeof(buf), "127.0.0.1:%d/%s", port, id.c_str()); + return buf; +} + +std::string MapToString(const std::map& object) { + bool first = true; + std::ostringstream json; + json << "{\n"; + for (const auto& name_value : object) { + if (!first) + json << ",\n"; + first = false; + json << " \"" << name_value.first << "\": \""; + json << name_value.second << "\""; + } + json << "\n} "; + return json.str(); +} + +std::string MapsToString( + const std::vector>& array) { + bool first = true; + std::ostringstream json; + json << "[ "; + for (const auto& object : array) { + if (!first) + json << ", "; + first = false; + json << MapToString(object); + } + json << "]\n\n"; + return json.str(); +} + +const char* MatchPathSegment(const char* path, const char* expected) { + size_t len = strlen(expected); + if (StringEqualNoCaseN(path, expected, len)) { + if (path[len] == '/') return path + len + 1; + if (path[len] == '\0') return path + len; + } + return nullptr; +} + +void OnBufferAlloc(uv_handle_t* handle, size_t len, uv_buf_t* buf) { + buf->base = new char[len]; + buf->len = len; +} + +void PrintDebuggerReadyMessage(int port, const std::vector& ids) { + fprintf(stderr, + "Debugger listening on port %d.\n" + "Warning: This is an experimental feature " + "and could change at any time.\n", + port); + if (ids.size() == 1) + fprintf(stderr, "To start debugging, open the following URL in Chrome:\n"); + if (ids.size() > 1) + fprintf(stderr, "To start debugging, open the following URLs in Chrome:\n"); + for (const std::string& id : ids) { + fprintf(stderr, + " chrome-devtools://devtools/bundled/inspector.html?" + "experiments=true&v8only=true&ws=%s\n", GetWsUrl(port, id).c_str()); + } + fflush(stderr); +} + +void SendHttpResponse(InspectorSocket* socket, const std::string& response) { + const char HEADERS[] = "HTTP/1.0 200 OK\r\n" + "Content-Type: application/json; charset=UTF-8\r\n" + "Cache-Control: no-cache\r\n" + "Content-Length: %zu\r\n" + "\r\n"; + char header[sizeof(HEADERS) + 20]; + int header_len = snprintf(header, sizeof(header), HEADERS, response.size()); + inspector_write(socket, header, header_len); + inspector_write(socket, response.data(), response.size()); +} + +void SendVersionResponse(InspectorSocket* socket) { + std::map response; + response["Browser"] = "node.js/" NODE_VERSION; + response["Protocol-Version"] = "1.1"; + SendHttpResponse(socket, MapToString(response)); +} + +void SendProtocolJson(InspectorSocket* socket) { + z_stream strm; + strm.zalloc = Z_NULL; + strm.zfree = Z_NULL; + strm.opaque = Z_NULL; + CHECK_EQ(Z_OK, inflateInit(&strm)); + static const size_t kDecompressedSize = + PROTOCOL_JSON[0] * 0x10000u + + PROTOCOL_JSON[1] * 0x100u + + PROTOCOL_JSON[2]; + strm.next_in = const_cast(PROTOCOL_JSON + 3); + strm.avail_in = sizeof(PROTOCOL_JSON) - 3; + std::string data(kDecompressedSize, '\0'); + strm.next_out = reinterpret_cast(&data[0]); + strm.avail_out = data.size(); + CHECK_EQ(Z_STREAM_END, inflate(&strm, Z_FINISH)); + CHECK_EQ(0, strm.avail_out); + CHECK_EQ(Z_OK, inflateEnd(&strm)); + SendHttpResponse(socket, data); +} + +} // namespace + + +class Closer { + public: + explicit Closer(InspectorSocketServer* server) : server_(server), + close_count_(0) { } + + void AddCallback(InspectorSocketServer::ServerCallback callback) { + if (callback == nullptr) + return; + callbacks_.insert(callback); + } + + void DecreaseExpectedCount() { + --close_count_; + NotifyIfDone(); + } + 
+ void IncreaseExpectedCount() { + ++close_count_; + } + + void NotifyIfDone() { + if (close_count_ == 0) { + for (auto callback : callbacks_) { + callback(server_); + } + InspectorSocketServer* server = server_; + delete server->closer_; + server->closer_ = nullptr; + } + } + + private: + InspectorSocketServer* server_; + std::set callbacks_; + int close_count_; +}; + +class SocketSession { + public: + SocketSession(InspectorSocketServer* server, int id); + void Close(bool socket_cleanup, Closer* closer); + void Declined() { state_ = State::kDeclined; } + static SocketSession* From(InspectorSocket* socket) { + return node::ContainerOf(&SocketSession::socket_, socket); + } + void FrontendConnected(); + InspectorSocketServer* GetServer() { return server_; } + int Id() { return id_; } + void Send(const std::string& message); + void SetTargetId(const std::string& target_id) { + CHECK(target_id_.empty()); + target_id_ = target_id; + } + InspectorSocket* Socket() { return &socket_; } + const std::string TargetId() { return target_id_; } + + private: + enum class State { kHttp, kWebSocket, kClosing, kEOF, kDeclined }; + static void CloseCallback_(InspectorSocket* socket, int code); + static void ReadCallback_(uv_stream_t* stream, ssize_t read, + const uv_buf_t* buf); + void OnRemoteDataIO(InspectorSocket* socket, ssize_t read, + const uv_buf_t* buf); + const int id_; + Closer* closer_; + InspectorSocket socket_; + InspectorSocketServer* server_; + std::string target_id_; + State state_; +}; + +InspectorSocketServer::InspectorSocketServer(SocketServerDelegate* delegate, + int port) : loop_(nullptr), + delegate_(delegate), + port_(port), + closer_(nullptr), + next_session_id_(0) { } + + +// static +bool InspectorSocketServer::HandshakeCallback(InspectorSocket* socket, + inspector_handshake_event event, + const std::string& path) { + InspectorSocketServer* server = SocketSession::From(socket)->GetServer(); + const std::string& id = path.empty() ? 
path : path.substr(1); + switch (event) { + case kInspectorHandshakeHttpGet: + return server->RespondToGet(socket, path); + case kInspectorHandshakeUpgrading: + return server->SessionStarted(SocketSession::From(socket), id); + case kInspectorHandshakeUpgraded: + SocketSession::From(socket)->FrontendConnected(); + return true; + case kInspectorHandshakeFailed: + SocketSession::From(socket)->Close(false, nullptr); + return false; + default: + UNREACHABLE(); + return false; + } +} + +bool InspectorSocketServer::SessionStarted(SocketSession* session, + const std::string& id) { + bool connected = false; + if (TargetExists(id)) { + connected = delegate_->StartSession(session->Id(), id); + } + if (connected) { + connected_sessions_[session->Id()] = session; + session->SetTargetId(id); + } else { + session->Declined(); + } + return connected; +} + +void InspectorSocketServer::SessionTerminated(int session_id) { + if (connected_sessions_.erase(session_id) == 0) { + return; + } + delegate_->EndSession(session_id); + if (connected_sessions_.empty() && + uv_is_active(reinterpret_cast(&server_))) { + PrintDebuggerReadyMessage(port_, delegate_->GetTargetIds()); + } +} + +bool InspectorSocketServer::RespondToGet(InspectorSocket* socket, + const std::string& path) { + const char* command = MatchPathSegment(path.c_str(), "/json"); + if (command == nullptr) + return false; + + if (MatchPathSegment(command, "list") || command[0] == '\0') { + SendListResponse(socket); + return true; + } else if (MatchPathSegment(command, "protocol")) { + SendProtocolJson(socket); + return true; + } else if (MatchPathSegment(command, "version")) { + SendVersionResponse(socket); + return true; + } else if (const char* target_id = MatchPathSegment(command, "activate")) { + if (TargetExists(target_id)) { + SendHttpResponse(socket, "Target activated"); + return true; + } + return false; + } + return false; +} + +void InspectorSocketServer::SendListResponse(InspectorSocket* socket) { + std::vector> response; + for (const std::string& id : delegate_->GetTargetIds()) { + response.push_back(std::map()); + std::map& target_map = response.back(); + target_map["description"] = "node.js instance"; + target_map["faviconUrl"] = "https://nodejs.org/static/favicon.ico"; + target_map["id"] = id; + target_map["title"] = delegate_->GetTargetTitle(id); + Escape(&target_map["title"]); + target_map["type"] = "node"; + // This attribute value is a "best effort" URL that is passed as a JSON + // string. It is not guaranteed to resolve to a valid resource. 
+ target_map["url"] = delegate_->GetTargetUrl(id); + Escape(&target_map["url"]); + + bool connected = false; + for (const auto& session : connected_sessions_) { + if (session.second->TargetId() == id) { + connected = true; + break; + } + } + if (!connected) { + std::string address = GetWsUrl(port_, id); + std::ostringstream frontend_url; + frontend_url << "chrome-devtools://devtools/bundled"; + frontend_url << "/inspector.html?experiments=true&v8only=true&ws="; + frontend_url << address; + target_map["devtoolsFrontendUrl"] += frontend_url.str(); + target_map["webSocketDebuggerUrl"] = "ws://" + address; + } + } + SendHttpResponse(socket, MapsToString(response)); +} + +bool InspectorSocketServer::Start(uv_loop_t* loop) { + loop_ = loop; + sockaddr_in addr; + uv_tcp_init(loop_, &server_); + uv_ip4_addr("0.0.0.0", port_, &addr); + int err = uv_tcp_bind(&server_, + reinterpret_cast(&addr), 0); + if (err == 0) { + err = uv_listen(reinterpret_cast(&server_), 1, + SocketConnectedCallback); + } + if (err == 0 && connected_sessions_.empty()) { + PrintDebuggerReadyMessage(port_, delegate_->GetTargetIds()); + } + if (err != 0 && connected_sessions_.empty()) { + fprintf(stderr, "Unable to open devtools socket: %s\n", uv_strerror(err)); + uv_close(reinterpret_cast(&server_), nullptr); + return false; + } + return true; +} + +void InspectorSocketServer::Stop(ServerCallback cb) { + if (closer_ == nullptr) { + closer_ = new Closer(this); + } + closer_->AddCallback(cb); + + uv_handle_t* handle = reinterpret_cast(&server_); + if (uv_is_active(handle)) { + closer_->IncreaseExpectedCount(); + uv_close(reinterpret_cast(&server_), ServerClosedCallback); + } + closer_->NotifyIfDone(); +} + +void InspectorSocketServer::TerminateConnections(ServerCallback cb) { + if (closer_ == nullptr) { + closer_ = new Closer(this); + } + closer_->AddCallback(cb); + std::map sessions; + std::swap(sessions, connected_sessions_); + for (const auto& session : sessions) { + int id = session.second->Id(); + session.second->Close(true, closer_); + delegate_->EndSession(id); + } + closer_->NotifyIfDone(); +} + +bool InspectorSocketServer::TargetExists(const std::string& id) { + const std::vector& target_ids = delegate_->GetTargetIds(); + const auto& found = std::find(target_ids.begin(), target_ids.end(), id); + return found != target_ids.end(); +} + +void InspectorSocketServer::Send(int session_id, const std::string& message) { + auto session_iterator = connected_sessions_.find(session_id); + if (session_iterator != connected_sessions_.end()) { + session_iterator->second->Send(message); + } +} + +// static +void InspectorSocketServer::ServerClosedCallback(uv_handle_t* server) { + InspectorSocketServer* socket_server = InspectorSocketServer::From(server); + if (socket_server->closer_) + socket_server->closer_->DecreaseExpectedCount(); +} + +// static +void InspectorSocketServer::SocketConnectedCallback(uv_stream_t* server, + int status) { + if (status == 0) { + InspectorSocketServer* socket_server = InspectorSocketServer::From(server); + SocketSession* session = + new SocketSession(socket_server, socket_server->next_session_id_++); + if (inspector_accept(server, session->Socket(), HandshakeCallback) != 0) { + delete session; + } + } +} + +// InspectorSession tracking +SocketSession::SocketSession(InspectorSocketServer* server, int id) + : id_(id), closer_(nullptr), server_(server), + state_(State::kHttp) { } + +void SocketSession::Close(bool socket_cleanup, Closer* closer) { + CHECK_EQ(closer_, nullptr); + CHECK_NE(state_, 
State::kClosing); + server_->SessionTerminated(id_); + if (socket_cleanup) { + state_ = State::kClosing; + closer_ = closer; + if (closer_ != nullptr) + closer->IncreaseExpectedCount(); + inspector_close(&socket_, CloseCallback_); + } else { + delete this; + } +} + +// static +void SocketSession::CloseCallback_(InspectorSocket* socket, int code) { + SocketSession* session = SocketSession::From(socket); + CHECK_EQ(State::kClosing, session->state_); + Closer* closer = session->closer_; + if (closer != nullptr) + closer->DecreaseExpectedCount(); + delete session; +} + +void SocketSession::FrontendConnected() { + CHECK_EQ(State::kHttp, state_); + state_ = State::kWebSocket; + inspector_read_start(&socket_, OnBufferAlloc, ReadCallback_); +} + +// static +void SocketSession::ReadCallback_(uv_stream_t* stream, ssize_t read, + const uv_buf_t* buf) { + InspectorSocket* socket = inspector_from_stream(stream); + SocketSession::From(socket)->OnRemoteDataIO(socket, read, buf); +} + +void SocketSession::OnRemoteDataIO(InspectorSocket* socket, ssize_t read, + const uv_buf_t* buf) { + if (read > 0) { + server_->Delegate()->MessageReceived(id_, std::string(buf->base, read)); + } else { + server_->SessionTerminated(id_); + Close(true, nullptr); + } + if (buf != nullptr && buf->base != nullptr) + delete[] buf->base; +} + +void SocketSession::Send(const std::string& message) { + inspector_write(&socket_, message.data(), message.length()); +} + +} // namespace inspector +} // namespace node diff --git a/src/inspector_socket_server.h b/src/inspector_socket_server.h new file mode 100644 index 00000000000000..4c139e138f7547 --- /dev/null +++ b/src/inspector_socket_server.h @@ -0,0 +1,77 @@ +#ifndef SRC_INSPECTOR_SOCKET_SERVER_H_ +#define SRC_INSPECTOR_SOCKET_SERVER_H_ + +#include "inspector_agent.h" +#include "inspector_socket.h" +#include "uv.h" + +#include +#include +#include + +#if !HAVE_INSPECTOR +#error("This header can only be used when inspector is enabled") +#endif + +namespace node { +namespace inspector { + +class Closer; +class SocketSession; + +class SocketServerDelegate { + public: + virtual bool StartSession(int session_id, const std::string& target_id) = 0; + virtual void EndSession(int session_id) = 0; + virtual void MessageReceived(int session_id, const std::string& message) = 0; + virtual std::vector GetTargetIds() = 0; + virtual std::string GetTargetTitle(const std::string& id) = 0; + virtual std::string GetTargetUrl(const std::string& id) = 0; +}; + +class InspectorSocketServer { + public: + using ServerCallback = void (*)(InspectorSocketServer*); + InspectorSocketServer(SocketServerDelegate* delegate, int port); + bool Start(uv_loop_t* loop); + void Stop(ServerCallback callback); + void Send(int session_id, const std::string& message); + void TerminateConnections(ServerCallback callback); + + private: + static bool HandshakeCallback(InspectorSocket* socket, + enum inspector_handshake_event state, + const std::string& path); + static void SocketConnectedCallback(uv_stream_t* server, int status); + static void ServerClosedCallback(uv_handle_t* server); + template + static InspectorSocketServer* From(SomeUvStruct* server) { + return node::ContainerOf(&InspectorSocketServer::server_, + reinterpret_cast(server)); + } + bool RespondToGet(InspectorSocket* socket, const std::string& path); + void SendListResponse(InspectorSocket* socket); + void ReadCallback(InspectorSocket* socket, ssize_t read, const uv_buf_t* buf); + bool SessionStarted(SocketSession* session, const std::string& id); + void 
SessionTerminated(int id); + bool TargetExists(const std::string& id); + static void SocketSessionDeleter(SocketSession*); + SocketServerDelegate* Delegate() { return delegate_; } + + uv_loop_t* loop_; + SocketServerDelegate* const delegate_; + const int port_; + std::string path_; + uv_tcp_t server_; + Closer* closer_; + std::map connected_sessions_; + int next_session_id_; + + friend class SocketSession; + friend class Closer; +}; + +} // namespace inspector +} // namespace node + +#endif // SRC_INSPECTOR_SOCKET_SERVER_H_ diff --git a/test/cctest/test_inspector_socket_server.cc b/test/cctest/test_inspector_socket_server.cc new file mode 100644 index 00000000000000..d253df5dd9c593 --- /dev/null +++ b/test/cctest/test_inspector_socket_server.cc @@ -0,0 +1,517 @@ +#include "inspector_socket_server.h" + +#include "node.h" +#include "gtest/gtest.h" + +#include +#include + +static const int PORT = 9229; +static uv_loop_t loop; + +static const char CLIENT_CLOSE_FRAME[] = "\x88\x80\x2D\x0E\x1E\xFA"; +static const char SERVER_CLOSE_FRAME[] = "\x88\x00"; + +static const char MAIN_TARGET_ID[] = "main-target"; +static const char UNCONNECTABLE_TARGET_ID[] = "unconnectable-target"; + +static const char WS_HANDSHAKE_RESPONSE[] = + "HTTP/1.1 101 Switching Protocols\r\n" + "Upgrade: websocket\r\n" + "Connection: Upgrade\r\n" + "Sec-WebSocket-Accept: Dt87H1OULVZnSJo/KgMUYI7xPCg=\r\n\r\n"; + +#define SPIN_WHILE(condition) \ + { \ + Timeout timeout(&loop); \ + while ((condition) && !timeout.timed_out) { \ + uv_run(&loop, UV_RUN_NOWAIT); \ + } \ + ASSERT_FALSE((condition)); \ + } + +namespace { + +using InspectorSocketServer = node::inspector::InspectorSocketServer; +using SocketServerDelegate = node::inspector::SocketServerDelegate; + +class Timeout { + public: + explicit Timeout(uv_loop_t* loop) : timed_out(false), done_(false) { + uv_timer_init(loop, &timer_); + uv_timer_start(&timer_, Timeout::set_flag, 5000, 0); + } + + ~Timeout() { + uv_timer_stop(&timer_); + uv_close(reinterpret_cast(&timer_), mark_done); + while (!done_) { + uv_run(&loop, UV_RUN_NOWAIT); + } + } + bool timed_out; + + private: + static void set_flag(uv_timer_t* timer) { + Timeout* t = node::ContainerOf(&Timeout::timer_, timer); + t->timed_out = true; + } + + static void mark_done(uv_handle_t* timer) { + Timeout* t = node::ContainerOf(&Timeout::timer_, + reinterpret_cast(timer)); + t->done_ = true; + } + + bool done_; + uv_timer_t timer_; +}; + +class InspectorSocketServerTest : public ::testing::Test { + protected: + void SetUp() override { + uv_loop_init(&loop); + } + + void TearDown() override { + const int err = uv_loop_close(&loop); + if (err != 0) { + uv_print_all_handles(&loop, stderr); + } + EXPECT_EQ(0, err); + } +}; + +class TestInspectorServerDelegate : public SocketServerDelegate { + public: + TestInspectorServerDelegate() : connected(0), disconnected(0), + targets_({ MAIN_TARGET_ID, + UNCONNECTABLE_TARGET_ID }) {} + + void Connect(InspectorSocketServer* server) { + server_ = server; + } + + bool StartSession(int session_id, const std::string& target_id) override { + buffer_.clear(); + CHECK_NE(targets_.end(), + std::find(targets_.begin(), targets_.end(), target_id)); + if (target_id == UNCONNECTABLE_TARGET_ID) { + return false; + } + connected++; + session_id_ = session_id; + return true; + } + + void MessageReceived(int session_id, const std::string& message) override { + ASSERT_EQ(session_id_, session_id); + buffer_.insert(buffer_.end(), message.begin(), message.end()); + } + + void EndSession(int session_id) override { 
+ ASSERT_EQ(session_id_, session_id); + disconnected++; + } + + std::vector GetTargetIds() override { + return targets_; + } + + std::string GetTargetTitle(const std::string& id) override { + return id + " Target Title"; + } + + std::string GetTargetUrl(const std::string& id) override { + return "file://" + id + "/script.js"; + } + + void Expect(const std::string& expects) { + SPIN_WHILE(buffer_.size() < expects.length()); + ASSERT_STREQ(std::string(buffer_.data(), expects.length()).c_str(), + expects.c_str()); + buffer_.erase(buffer_.begin(), buffer_.begin() + expects.length()); + } + + void Write(const std::string& message) { + server_->Send(session_id_, message); + } + + int connected; + int disconnected; + + private: + const std::vector targets_; + InspectorSocketServer* server_; + int session_id_; + std::vector buffer_; +}; + +class SocketWrapper { + public: + explicit SocketWrapper(uv_loop_t* loop) : closed_(false), + eof_(false), + loop_(loop), + connected_(false), + sending_(false) { } + + void Connect(std::string host, int port) { + closed_ = false; + connection_failed_ = false; + connected_ = false; + eof_ = false; + contents_.clear(); + uv_tcp_init(loop_, &socket_); + sockaddr_in addr; + uv_ip4_addr(host.c_str(), PORT, &addr); + int err = uv_tcp_connect(&connect_, &socket_, + reinterpret_cast(&addr), + Connected_); + ASSERT_EQ(0, err); + SPIN_WHILE(!connected_) + uv_read_start(reinterpret_cast(&socket_), AllocCallback, + ReadCallback); + } + + void ExpectFailureToConnect(std::string host, int port) { + connected_ = false; + connection_failed_ = false; + closed_ = false; + eof_ = false; + contents_.clear(); + uv_tcp_init(loop_, &socket_); + sockaddr_in addr; + uv_ip4_addr(host.c_str(), PORT, &addr); + int err = uv_tcp_connect(&connect_, &socket_, + reinterpret_cast(&addr), + ConnectionMustFail_); + ASSERT_EQ(0, err); + SPIN_WHILE(!connection_failed_) + uv_read_start(reinterpret_cast(&socket_), AllocCallback, + ReadCallback); + } + + void Close() { + uv_close(reinterpret_cast(&socket_), ClosedCallback); + SPIN_WHILE(!closed_); + } + + void Expect(const std::string& expects) { + SPIN_WHILE(contents_.size() < expects.length()); + ASSERT_STREQ(expects.c_str(), + std::string(contents_.data(), expects.length()).c_str()); + contents_.erase(contents_.begin(), contents_.begin() + expects.length()); + } + + void ExpectEOF() { + SPIN_WHILE(!eof_); + Close(); + } + + void TestHttpRequest(const std::string& path, + const std::string& expected_reply) { + std::ostringstream expectations; + expectations << "HTTP/1.0 200 OK\r\n" + "Content-Type: application/json; charset=UTF-8\r\n" + "Cache-Control: no-cache\r\n" + "Content-Length: "; + expectations << expected_reply.length() + 2; + expectations << "\r\n\r\n" << expected_reply << "\n\n"; + Write("GET " + path + " HTTP/1.1\r\n" + "Host: localhost:9229\r\n\r\n"); + Expect(expectations.str()); + } + + void Write(const std::string& data) { + ASSERT_FALSE(sending_); + uv_buf_t buf[1]; + buf[0].base = const_cast(data.data()); + buf[0].len = data.length(); + sending_ = true; + int err = uv_write(&write_, reinterpret_cast(&socket_), + buf, 1, WriteDone_); + ASSERT_EQ(err, 0); + SPIN_WHILE(sending_); + } + + private: + static void AllocCallback(uv_handle_t*, size_t size, uv_buf_t* buf) { + *buf = uv_buf_init(new char[size], size); + } + + static void ClosedCallback(uv_handle_t* handle) { + SocketWrapper* wrapper = + node::ContainerOf(&SocketWrapper::socket_, + reinterpret_cast(handle)); + ASSERT_FALSE(wrapper->closed_); + wrapper->closed_ = true; + } + + 
static void Connected_(uv_connect_t* connect, int status) { + EXPECT_EQ(0, status); + SocketWrapper* wrapper = + node::ContainerOf(&SocketWrapper::connect_, connect); + wrapper->connected_ = true; + } + + static void ConnectionMustFail_(uv_connect_t* connect, int status) { + EXPECT_EQ(UV_ECONNREFUSED, status); + SocketWrapper* wrapper = + node::ContainerOf(&SocketWrapper::connect_, connect); + wrapper->connection_failed_ = true; + } + + static void ReadCallback(uv_stream_t* stream, ssize_t read, + const uv_buf_t* buf) { + SocketWrapper* wrapper = + node::ContainerOf(&SocketWrapper::socket_, + reinterpret_cast(stream)); + if (read == UV_EOF) { + wrapper->eof_ = true; + } else { + wrapper->contents_.insert(wrapper->contents_.end(), buf->base, + buf->base + read); + } + delete[] buf->base; + } + static void WriteDone_(uv_write_t* req, int err) { + ASSERT_EQ(0, err); + SocketWrapper* wrapper = + node::ContainerOf(&SocketWrapper::write_, req); + ASSERT_TRUE(wrapper->sending_); + wrapper->sending_ = false; + } + bool IsConnected() { return connected_; } + + bool closed_; + bool eof_; + uv_loop_t* loop_; + uv_tcp_t socket_; + uv_connect_t connect_; + uv_write_t write_; + bool connected_; + bool connection_failed_; + bool sending_; + std::vector contents_; +}; + +class ServerHolder { + public: + template + ServerHolder(Delegate* delegate, int port) + : closed(false), paused(false), sessions_terminated(false), + server_(delegate, port) { + delegate->Connect(&server_); + } + + InspectorSocketServer* operator->() { + return &server_; + } + + static void CloseCallback(InspectorSocketServer* server) { + ServerHolder* holder = node::ContainerOf(&ServerHolder::server_, server); + holder->closed = true; + } + + static void ConnectionsTerminated(InspectorSocketServer* server) { + ServerHolder* holder = node::ContainerOf(&ServerHolder::server_, server); + holder->sessions_terminated = true; + } + + static void PausedCallback(InspectorSocketServer* server) { + ServerHolder* holder = node::ContainerOf(&ServerHolder::server_, server); + holder->paused = true; + } + + bool closed; + bool paused; + bool sessions_terminated; + + private: + InspectorSocketServer server_; +}; + +class ServerDelegateNoTargets : public SocketServerDelegate { + public: + void Connect(InspectorSocketServer* server) { } + void MessageReceived(int session_id, const std::string& message) override { } + void EndSession(int session_id) override { } + + bool StartSession(int session_id, const std::string& target_id) override { + return false; + } + + std::vector GetTargetIds() override { + return std::vector(); + } + + std::string GetTargetTitle(const std::string& id) override { + return ""; + } + + std::string GetTargetUrl(const std::string& id) override { + return ""; + } +}; + +static void TestHttpRequest(int port, const std::string& path, + const std::string& expected_body) { + SocketWrapper socket(&loop); + socket.Connect("0.0.0.0", port); + socket.TestHttpRequest(path, expected_body); + socket.Close(); +} + +static const std::string WsHandshakeRequest(const std::string& target_id) { + return "GET /" + target_id + " HTTP/1.1\r\n" + "Host: localhost:9229\r\n" + "Upgrade: websocket\r\n" + "Connection: Upgrade\r\n" + "Sec-WebSocket-Key: aaa==\r\n" + "Sec-WebSocket-Version: 13\r\n\r\n"; +} +} // namespace + + +TEST_F(InspectorSocketServerTest, InspectorSessions) { + TestInspectorServerDelegate delegate; + ServerHolder server(&delegate, PORT); + ASSERT_TRUE(server->Start(&loop)); + + SocketWrapper well_behaved_socket(&loop); + // Regular 
connection + well_behaved_socket.Connect("0.0.0.0", PORT); + well_behaved_socket.Write(WsHandshakeRequest(MAIN_TARGET_ID)); + well_behaved_socket.Expect(WS_HANDSHAKE_RESPONSE); + + + EXPECT_EQ(1, delegate.connected); + + well_behaved_socket.Write("\x81\x84\x7F\xC2\x66\x31\x4E\xF0\x55\x05"); + + delegate.Expect("1234"); + delegate.Write("5678"); + + well_behaved_socket.Expect("\x81\x4" "5678"); + + well_behaved_socket.Write(CLIENT_CLOSE_FRAME); + well_behaved_socket.Expect(SERVER_CLOSE_FRAME); + + EXPECT_EQ(1, delegate.disconnected); + + well_behaved_socket.Close(); + + // Declined connection + SocketWrapper declined_target_socket(&loop); + declined_target_socket.Connect("127.0.0.1", PORT); + declined_target_socket.Write(WsHandshakeRequest(UNCONNECTABLE_TARGET_ID)); + declined_target_socket.Expect("HTTP/1.0 400 Bad Request"); + declined_target_socket.ExpectEOF(); + EXPECT_EQ(1, delegate.connected); + EXPECT_EQ(1, delegate.disconnected); + + // Bogus target - start session callback should not even be invoked + SocketWrapper bogus_target_socket(&loop); + bogus_target_socket.Connect("127.0.0.1", PORT); + bogus_target_socket.Write(WsHandshakeRequest("bogus_target")); + bogus_target_socket.Expect("HTTP/1.0 400 Bad Request"); + bogus_target_socket.ExpectEOF(); + EXPECT_EQ(1, delegate.connected); + EXPECT_EQ(1, delegate.disconnected); + + // Drop connection (no proper close frames) + SocketWrapper dropped_connection_socket(&loop); + dropped_connection_socket.Connect("127.0.0.1", PORT); + dropped_connection_socket.Write(WsHandshakeRequest(MAIN_TARGET_ID)); + dropped_connection_socket.Expect(WS_HANDSHAKE_RESPONSE); + + EXPECT_EQ(2, delegate.connected); + + delegate.Write("5678"); + dropped_connection_socket.Expect("\x81\x4" "5678"); + + dropped_connection_socket.Close(); + SPIN_WHILE(delegate.disconnected < 2); + + // Reconnect regular connection + SocketWrapper stays_till_termination_socket(&loop); + stays_till_termination_socket.Connect("127.0.0.1", PORT); + stays_till_termination_socket.Write(WsHandshakeRequest(MAIN_TARGET_ID)); + stays_till_termination_socket.Expect(WS_HANDSHAKE_RESPONSE); + + EXPECT_EQ(3, delegate.connected); + + delegate.Write("5678"); + stays_till_termination_socket.Expect("\x81\x4" "5678"); + + stays_till_termination_socket + .Write("\x81\x84\x7F\xC2\x66\x31\x4E\xF0\x55\x05"); + delegate.Expect("1234"); + + server->Stop(ServerHolder::CloseCallback); + server->TerminateConnections(ServerHolder::ConnectionsTerminated); + + stays_till_termination_socket.Write(CLIENT_CLOSE_FRAME); + stays_till_termination_socket.Expect(SERVER_CLOSE_FRAME); + + EXPECT_EQ(3, delegate.disconnected); + + SPIN_WHILE(!server.closed); + stays_till_termination_socket.ExpectEOF(); +} + +TEST_F(InspectorSocketServerTest, ServerDoesNothing) { + TestInspectorServerDelegate delegate; + ServerHolder server(&delegate, PORT); + ASSERT_TRUE(server->Start(&loop)); + + server->Stop(ServerHolder::CloseCallback); + server->TerminateConnections(ServerHolder::ConnectionsTerminated); + SPIN_WHILE(!server.closed); +} + +TEST_F(InspectorSocketServerTest, ServerWithoutTargets) { + ServerDelegateNoTargets delegate; + ServerHolder server(&delegate, PORT); + ASSERT_TRUE(server->Start(&loop)); + TestHttpRequest(PORT, "/json/list", "[ ]"); + TestHttpRequest(PORT, "/json", "[ ]"); + + // Declined connection + SocketWrapper socket(&loop); + socket.Connect("0.0.0.0", PORT); + socket.Write(WsHandshakeRequest(UNCONNECTABLE_TARGET_ID)); + socket.Expect("HTTP/1.0 400 Bad Request"); + socket.ExpectEOF(); + 
server->Stop(ServerHolder::CloseCallback); + server->TerminateConnections(ServerHolder::ConnectionsTerminated); + SPIN_WHILE(!server.closed); +} + +TEST_F(InspectorSocketServerTest, ServerCannotStart) { + ServerDelegateNoTargets delegate1, delegate2; + ServerHolder server1(&delegate1, PORT); + ASSERT_TRUE(server1->Start(&loop)); + ServerHolder server2(&delegate2, PORT); + ASSERT_FALSE(server2->Start(&loop)); + server1->Stop(ServerHolder::CloseCallback); + server1->TerminateConnections(ServerHolder::ConnectionsTerminated); + server2->Stop(ServerHolder::CloseCallback); + server2->TerminateConnections(ServerHolder::ConnectionsTerminated); + SPIN_WHILE(!server1.closed); + SPIN_WHILE(!server2.closed); +} + +TEST_F(InspectorSocketServerTest, StoppingServerDoesNotKillConnections) { + ServerDelegateNoTargets delegate; + ServerHolder server(&delegate, PORT); + ASSERT_TRUE(server->Start(&loop)); + SocketWrapper socket1(&loop); + socket1.Connect("0.0.0.0", PORT); + socket1.TestHttpRequest("/json/list", "[ ]"); + server->Stop(ServerHolder::CloseCallback); + SPIN_WHILE(!server.closed); + socket1.TestHttpRequest("/json/list", "[ ]"); + socket1.Close(); + uv_run(&loop, UV_RUN_DEFAULT); +} From 4bed9475d1b92d78c631f81bbe9f4ec3fd1c0407 Mon Sep 17 00:00:00 2001 From: Eugene Ostroukhov Date: Mon, 12 Dec 2016 10:51:18 -0800 Subject: [PATCH 005/144] inspector: fix Coverity defects One defect remains - Coverity believes that a session object is never freed while in reality its lifespan is tied to a libuv socket. PR-URL: https://github.com/nodejs/node/pull/10240 Reviewed-By: Ben Noordhuis Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Ali Ijaz Sheikh --- src/inspector_socket_server.cc | 2 ++ test/cctest/test_inspector_socket.cc | 10 +++++----- test/cctest/test_inspector_socket_server.cc | 4 +++- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/inspector_socket_server.cc b/src/inspector_socket_server.cc index e05a0c577da7b1..4bd35ae8bed4fb 100644 --- a/src/inspector_socket_server.cc +++ b/src/inspector_socket_server.cc @@ -210,6 +210,7 @@ InspectorSocketServer::InspectorSocketServer(SocketServerDelegate* delegate, int port) : loop_(nullptr), delegate_(delegate), port_(port), + server_(uv_tcp_t()), closer_(nullptr), next_session_id_(0) { } @@ -400,6 +401,7 @@ void InspectorSocketServer::SocketConnectedCallback(uv_stream_t* server, int status) { if (status == 0) { InspectorSocketServer* socket_server = InspectorSocketServer::From(server); + // Memory is freed when the socket closes. 
SocketSession* session = new SocketSession(socket_server, socket_server->next_session_id_++); if (inspector_accept(server, session->Socket(), HandshakeCallback) != 0) { diff --git a/test/cctest/test_inspector_socket.cc b/test/cctest/test_inspector_socket.cc index ada3df3d438ce8..b61fbd2cd6475d 100644 --- a/test/cctest/test_inspector_socket.cc +++ b/test/cctest/test_inspector_socket.cc @@ -370,13 +370,13 @@ class InspectorSocketTest : public ::testing::Test { uv_tcp_init(&loop, &client_socket); uv_ip4_addr("127.0.0.1", PORT, &addr); uv_tcp_bind(&server, reinterpret_cast(&addr), 0); - int err = uv_listen(reinterpret_cast(&server), - 1, on_new_connection); - GTEST_ASSERT_EQ(0, err); + GTEST_ASSERT_EQ(0, uv_listen(reinterpret_cast(&server), + 1, on_new_connection)); uv_connect_t connect; connect.data = nullptr; - uv_tcp_connect(&connect, &client_socket, - reinterpret_cast(&addr), on_connection); + GTEST_ASSERT_EQ(0, uv_tcp_connect(&connect, &client_socket, + reinterpret_cast(&addr), + on_connection)); uv_tcp_nodelay(&client_socket, 1); // The buffering messes up the test SPIN_WHILE(!connect.data || !connected); really_close(reinterpret_cast(&server)); diff --git a/test/cctest/test_inspector_socket_server.cc b/test/cctest/test_inspector_socket_server.cc index d253df5dd9c593..2bbc3811390fe6 100644 --- a/test/cctest/test_inspector_socket_server.cc +++ b/test/cctest/test_inspector_socket_server.cc @@ -86,7 +86,8 @@ class TestInspectorServerDelegate : public SocketServerDelegate { public: TestInspectorServerDelegate() : connected(0), disconnected(0), targets_({ MAIN_TARGET_ID, - UNCONNECTABLE_TARGET_ID }) {} + UNCONNECTABLE_TARGET_ID }), + session_id_(0) {} void Connect(InspectorSocketServer* server) { server_ = server; @@ -152,6 +153,7 @@ class SocketWrapper { explicit SocketWrapper(uv_loop_t* loop) : closed_(false), eof_(false), loop_(loop), + socket_(uv_tcp_t()), connected_(false), sending_(false) { } From 76b0e5bfbe67a374acfaab4034ddfff5a3594e31 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 16 Dec 2016 22:05:01 -0800 Subject: [PATCH 006/144] test: refactor test-timers-this MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * use common.mustCall() and eliminate exit handler * provide timer durtion of 1ms where previously omitted * var -> const PR-URL: https://github.com/nodejs/node/pull/10315 Reviewed-By: Michaël Zasso Reviewed-By: Jeremiah Senkpiel --- test/parallel/test-timers-this.js | 56 +++++++++++-------------------- 1 file changed, 20 insertions(+), 36 deletions(-) diff --git a/test/parallel/test-timers-this.js b/test/parallel/test-timers-this.js index e21167581e8944..6bd4033756626e 100644 --- a/test/parallel/test-timers-this.js +++ b/test/parallel/test-timers-this.js @@ -1,45 +1,29 @@ 'use strict'; -require('../common'); -var assert = require('assert'); +const common = require('../common'); +const assert = require('assert'); -let immediateThis, intervalThis, timeoutThis; -let immediateArgsThis, intervalArgsThis, timeoutArgsThis; +const immediateHandler = setImmediate(common.mustCall(function() { + assert.strictEqual(this, immediateHandler); +})); -var immediateHandler = setImmediate(function() { - immediateThis = this; -}); +const immediateArgsHandler = setImmediate(common.mustCall(function() { + assert.strictEqual(this, immediateArgsHandler); +}), 'args ...'); -var immediateArgsHandler = setImmediate(function() { - immediateArgsThis = this; -}, 'args ...'); - -var intervalHandler = setInterval(function() { +const intervalHandler = 
setInterval(common.mustCall(function() { clearInterval(intervalHandler); + assert.strictEqual(this, intervalHandler); +}), 1); - intervalThis = this; -}); - -var intervalArgsHandler = setInterval(function() { +const intervalArgsHandler = setInterval(common.mustCall(function() { clearInterval(intervalArgsHandler); + assert.strictEqual(this, intervalArgsHandler); +}), 1, 'args ...'); - intervalArgsThis = this; -}, 0, 'args ...'); - -var timeoutHandler = setTimeout(function() { - timeoutThis = this; -}); - -var timeoutArgsHandler = setTimeout(function() { - timeoutArgsThis = this; -}, 0, 'args ...'); - -process.once('exit', function() { - assert.strictEqual(immediateThis, immediateHandler); - assert.strictEqual(immediateArgsThis, immediateArgsHandler); - - assert.strictEqual(intervalThis, intervalHandler); - assert.strictEqual(intervalArgsThis, intervalArgsHandler); +const timeoutHandler = setTimeout(common.mustCall(function() { + assert.strictEqual(this, timeoutHandler); +}), 1); - assert.strictEqual(timeoutThis, timeoutHandler); - assert.strictEqual(timeoutArgsThis, timeoutArgsHandler); -}); +const timeoutArgsHandler = setTimeout(common.mustCall(function() { + assert.strictEqual(this, timeoutArgsHandler); +}), 1, 'args ...'); From 37563fafca14e8994e5dd7f42d2d475df401e174 Mon Sep 17 00:00:00 2001 From: Emanuel Buholzer Date: Mon, 19 Dec 2016 13:59:19 +0100 Subject: [PATCH 007/144] doc: fix broken link in COLLABORATOR_GUIDE.md PR-URL: https://github.com/nodejs/node/pull/10337 Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: Luigi Pinca Reviewed-By: Jeremiah Senkpiel Reviewed-By: Gibson Fahnestock --- COLLABORATOR_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/COLLABORATOR_GUIDE.md b/COLLABORATOR_GUIDE.md index abf91289fcfdec..32533304fecc67 100644 --- a/COLLABORATOR_GUIDE.md +++ b/COLLABORATOR_GUIDE.md @@ -36,7 +36,7 @@ Collaborators or additional evidence that the issue has relevance, the issue may be closed. Remember that issues can always be re-opened if necessary. 
-[**See "Who to CC in issues"**](./onboarding-extras.md#who-to-cc-in-issues) +[**See "Who to CC in issues"**](./doc/onboarding-extras.md#who-to-cc-in-issues) ## Accepting Modifications From 9a9e53029106967eb6f9e77432be401eefcb0a58 Mon Sep 17 00:00:00 2001 From: AnnaMag Date: Sat, 17 Dec 2016 17:05:02 +0100 Subject: [PATCH 008/144] test: add known_issues test for #5350 PR-URL: https://github.com/nodejs/node/pull/10319 Reviewed-By: Colin Ihrig Reviewed-By: Jeremiah Senkpiel Reviewed-By: Franziska Hinkelmann --- .../test-vm-inherited_properties.js | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 test/known_issues/test-vm-inherited_properties.js diff --git a/test/known_issues/test-vm-inherited_properties.js b/test/known_issues/test-vm-inherited_properties.js new file mode 100644 index 00000000000000..f90cf3568ed9c5 --- /dev/null +++ b/test/known_issues/test-vm-inherited_properties.js @@ -0,0 +1,20 @@ +'use strict'; +// Ref: https://github.com/nodejs/node/issues/5350 + +require('../common'); +const vm = require('vm'); +const assert = require('assert'); + +const base = { + propBase: 1 +}; + +const sandbox = Object.create(base, { + propSandbox: {value: 3} +}); + +const context = vm.createContext(sandbox); + +const result = vm.runInContext('this.hasOwnProperty("propBase");', context); + +assert.strictEqual(result, false); From 2f92945a70c2268c112610840996e6a5655f2051 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 20 Dec 2016 21:54:04 -0800 Subject: [PATCH 009/144] test: use consistent block spacing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In preparation for enabling an ESLint rule, use consistent block spacing. This changes only six files in the code base as block spacing is consistent throughout the rest of the code base. 
Before: function(c) {data += c;} After: function(c) { data += c; } PR-URL: https://github.com/nodejs/node/pull/10377 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Gibson Fahnestock Reviewed-By: Michaël Zasso Reviewed-By: Julian Duque --- test/parallel/test-assert.js | 6 +++--- test/parallel/test-buffer-alloc.js | 2 +- test/parallel/test-debugger-repeat-last.js | 6 +++--- test/parallel/test-fs-realpath.js | 24 +++++++++++----------- test/parallel/test-http-chunked.js | 4 ++-- test/parallel/test-v8-flag-type-check.js | 4 ++-- 6 files changed, 23 insertions(+), 23 deletions(-) diff --git a/test/parallel/test-assert.js b/test/parallel/test-assert.js index 3564c3b5ec25c5..eb983345418637 100644 --- a/test/parallel/test-assert.js +++ b/test/parallel/test-assert.js @@ -358,9 +358,9 @@ try { assert.equal(true, threw, 'a.doesNotThrow is not catching type matching errors'); -assert.throws(function() {assert.ifError(new Error('test error'));}); -assert.doesNotThrow(function() {assert.ifError(null);}); -assert.doesNotThrow(function() {assert.ifError();}); +assert.throws(function() { assert.ifError(new Error('test error')); }); +assert.doesNotThrow(function() { assert.ifError(null); }); +assert.doesNotThrow(function() { assert.ifError(); }); assert.throws(() => { assert.doesNotThrow(makeBlock(thrower, Error), 'user message'); diff --git a/test/parallel/test-buffer-alloc.js b/test/parallel/test-buffer-alloc.js index 6e97cf56b330df..4bc18dd1ab1cf7 100644 --- a/test/parallel/test-buffer-alloc.js +++ b/test/parallel/test-buffer-alloc.js @@ -990,7 +990,7 @@ assert.throws(() => Buffer.from('', 'buffer'), TypeError); { let a = [0]; for (let i = 0; i < 7; ++i) a = a.concat(a); - a = a.map((_, i) => {return i;}); + a = a.map((_, i) => { return i; }); const b = Buffer.from(a); const c = Buffer.from(b); assert.strictEqual(b.length, a.length); diff --git a/test/parallel/test-debugger-repeat-last.js b/test/parallel/test-debugger-repeat-last.js index 157baada0c3ea7..7a43bbb6c767ae 100644 --- a/test/parallel/test-debugger-repeat-last.js +++ b/test/parallel/test-debugger-repeat-last.js @@ -26,15 +26,15 @@ var sentExit = false; proc.stdout.on('data', (data) => { stdout += data; if (!sentCommand && stdout.includes('> 1')) { - setImmediate(() => {proc.stdin.write('n\n');}); + setImmediate(() => { proc.stdin.write('n\n'); }); return sentCommand = true; } if (!sentEmpty && stdout.includes('> 3')) { - setImmediate(() => {proc.stdin.write('\n');}); + setImmediate(() => { proc.stdin.write('\n'); }); return sentEmpty = true; } if (!sentExit && sentCommand && sentEmpty) { - setTimeout(() => {proc.stdin.write('\n\n\n.exit\n\n\n');}, 1); + setTimeout(() => { proc.stdin.write('\n\n\n.exit\n\n\n'); }, 1); return sentExit = true; } }); diff --git a/test/parallel/test-fs-realpath.js b/test/parallel/test-fs-realpath.js index 250837467d84eb..dcaf8177c9499c 100644 --- a/test/parallel/test-fs-realpath.js +++ b/test/parallel/test-fs-realpath.js @@ -87,7 +87,7 @@ function test_simple_relative_symlink(callback) { [ [entry, '../' + common.tmpDirName + '/cycles/root.js'] ].forEach(function(t) { - try {fs.unlinkSync(t[0]);} catch (e) {} + try { fs.unlinkSync(t[0]); } catch (e) {} console.log('fs.symlinkSync(%j, %j, %j)', t[1], t[0], 'file'); fs.symlinkSync(t[1], t[0], 'file'); unlink.push(t[0]); @@ -113,7 +113,7 @@ function test_simple_absolute_symlink(callback) { [ [entry, expected] ].forEach(function(t) { - try {fs.unlinkSync(t[0]);} catch (e) {} + try { fs.unlinkSync(t[0]); } catch (e) {} 
console.error('fs.symlinkSync(%j, %j, %j)', t[1], t[0], type); fs.symlinkSync(t[1], t[0], type); unlink.push(t[0]); @@ -138,13 +138,13 @@ function test_deep_relative_file_symlink(callback) { expected); const linkPath1 = path.join(targetsAbsDir, 'nested-index', 'one', 'symlink1.js'); - try {fs.unlinkSync(linkPath1);} catch (e) {} + try { fs.unlinkSync(linkPath1); } catch (e) {} fs.symlinkSync(linkData1, linkPath1, 'file'); const linkData2 = '../one/symlink1.js'; const entry = path.join(targetsAbsDir, 'nested-index', 'two', 'symlink1-b.js'); - try {fs.unlinkSync(entry);} catch (e) {} + try { fs.unlinkSync(entry); } catch (e) {} fs.symlinkSync(linkData2, entry, 'file'); unlink.push(linkPath1); unlink.push(entry); @@ -165,13 +165,13 @@ function test_deep_relative_dir_symlink(callback) { const path1b = path.join(targetsAbsDir, 'nested-index', 'one'); const linkPath1b = path.join(path1b, 'symlink1-dir'); const linkData1b = path.relative(path1b, expected); - try {fs.unlinkSync(linkPath1b);} catch (e) {} + try { fs.unlinkSync(linkPath1b); } catch (e) {} fs.symlinkSync(linkData1b, linkPath1b, 'dir'); const linkData2b = '../one/symlink1-dir'; const entry = path.join(targetsAbsDir, 'nested-index', 'two', 'symlink12-dir'); - try {fs.unlinkSync(entry);} catch (e) {} + try { fs.unlinkSync(entry); } catch (e) {} fs.symlinkSync(linkData2b, entry, 'dir'); unlink.push(linkPath1b); unlink.push(entry); @@ -195,7 +195,7 @@ function test_cyclic_link_protection(callback) { [common.tmpDir + '/cycles/realpath-3b', '../cycles/realpath-3c'], [common.tmpDir + '/cycles/realpath-3c', '../cycles/realpath-3a'] ].forEach(function(t) { - try {fs.unlinkSync(t[0]);} catch (e) {} + try { fs.unlinkSync(t[0]); } catch (e) {} fs.symlinkSync(t[1], t[0], 'dir'); unlink.push(t[0]); }); @@ -218,7 +218,7 @@ function test_cyclic_link_overprotection(callback) { const link = folder + '/cycles'; var testPath = cycles; testPath += '/folder/cycles'.repeat(10); - try {fs.unlinkSync(link);} catch (ex) {} + try { fs.unlinkSync(link); } catch (ex) {} fs.symlinkSync(cycles, link, 'dir'); unlink.push(link); assertEqualPath(fs.realpathSync(testPath), path.resolve(expected)); @@ -246,7 +246,7 @@ function test_relative_input_cwd(callback) { ].forEach(function(t) { const fn = t[0]; console.error('fn=%j', fn); - try {fs.unlinkSync(fn);} catch (e) {} + try { fs.unlinkSync(fn); } catch (e) {} const b = path.basename(t[1]); const type = (b === 'root.js' ? 
'file' : 'dir'); console.log('fs.symlinkSync(%j, %j, %j)', t[1], fn, type); @@ -363,7 +363,7 @@ function test_up_multiple(cb) { ['a/b', 'a' ].forEach(function(folder) { - try {fs.rmdirSync(tmp(folder));} catch (ex) {} + try { fs.rmdirSync(tmp(folder)); } catch (ex) {} }); } function setup() { @@ -420,14 +420,14 @@ function test_abs_with_kids(cb) { ['/a/b/c/x.txt', '/a/link' ].forEach(function(file) { - try {fs.unlinkSync(root + file);} catch (ex) {} + try { fs.unlinkSync(root + file); } catch (ex) {} }); ['/a/b/c', '/a/b', '/a', '' ].forEach(function(folder) { - try {fs.rmdirSync(root + folder);} catch (ex) {} + try { fs.rmdirSync(root + folder); } catch (ex) {} }); } function setup() { diff --git a/test/parallel/test-http-chunked.js b/test/parallel/test-http-chunked.js index 6edb1b3f96a74b..bb91dadd159de5 100644 --- a/test/parallel/test-http-chunked.js +++ b/test/parallel/test-http-chunked.js @@ -25,7 +25,7 @@ server.listen(0, function() { port: this.address().port }, function(x) { x.setEncoding('utf8'); - x.on('data', function(c) {data += c;}); + x.on('data', function(c) { data += c; }); x.on('error', function(e) { throw e; }); @@ -37,7 +37,7 @@ server.listen(0, function() { server.close(); }); }); - get.on('error', function(e) {throw e;}); + get.on('error', function(e) { throw e; }); get.end(); }); diff --git a/test/parallel/test-v8-flag-type-check.js b/test/parallel/test-v8-flag-type-check.js index 3724944821343c..986f37314d605f 100644 --- a/test/parallel/test-v8-flag-type-check.js +++ b/test/parallel/test-v8-flag-type-check.js @@ -3,5 +3,5 @@ require('../common'); var assert = require('assert'); var v8 = require('v8'); -assert.throws(function() {v8.setFlagsFromString(1);}, TypeError); -assert.throws(function() {v8.setFlagsFromString();}, TypeError); +assert.throws(function() { v8.setFlagsFromString(1); }, TypeError); +assert.throws(function() { v8.setFlagsFromString(); }, TypeError); From 3a7b63b81b8486dae2fd7fe4f3640d136cf6d837 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 20 Dec 2016 21:56:29 -0800 Subject: [PATCH 010/144] tools: enable block-spacing rule in .eslintrc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Enable rule to enforce consistent use of space between the `{` and `}` that surround a code block and the code block itself. This enforces via linting the de facto standard in the code base. PR-URL: https://github.com/nodejs/node/pull/10377 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Gibson Fahnestock Reviewed-By: Michaël Zasso Reviewed-By: Julian Duque --- .eslintrc | 1 + 1 file changed, 1 insertion(+) diff --git a/.eslintrc b/.eslintrc index 618b3d51116053..182e783206789b 100644 --- a/.eslintrc +++ b/.eslintrc @@ -76,6 +76,7 @@ rules: # Stylistic Issues # http://eslint.org/docs/rules/#stylistic-issues + block-spacing: 2 brace-style: [2, 1tbs, {allowSingleLine: true}] comma-spacing: 2 comma-style: 2 From 85d2a2abcf9a0ad73fa62ee9ac05127d040adad2 Mon Sep 17 00:00:00 2001 From: Tanuja-Sawant Date: Fri, 4 Nov 2016 21:11:10 +0530 Subject: [PATCH 011/144] doc: update writable.write return value stream.md is updated to explain the return value of writable.write(chunk) precisely. 
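As a minimal illustrative sketch of the behavior the updated text describes (not part of this patch; the stream, names and timings below are made up for the example): `write()` still buffers the chunk even when it returns `false`, and `false` is only the signal to wait for `'drain'` before writing more.

```js
const { Writable } = require('stream');

// A deliberately tiny highWaterMark so a single write() already
// reports backpressure.
const slowSink = new Writable({
  highWaterMark: 4,
  write(chunk, encoding, callback) {
    setTimeout(callback, 100);  // simulate a slow consumer
  }
});

const ok = slowSink.write('some data');  // chunk is buffered, but ok === false
if (!ok) {
  slowSink.once('drain', () => {
    // Safe to resume writing; the earlier chunk was accepted regardless.
    slowSink.end();
  });
}
```
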
PR-URL: https://github.com/nodejs/node/pull/9468
Fixes: https://github.com/nodejs/node/issues/9247
Reviewed-By: James M Snell
Reviewed-By: Ron Korving
Reviewed-By: Roman Reiss
---
 doc/api/stream.md | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/doc/api/stream.md b/doc/api/stream.md
index c3544f7939c4c0..881bb2c6294258 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -440,10 +440,12 @@ occurs, the `callback` *may or may not* be called with the error as its
 first argument. To reliably detect write errors, add a listener for the
 `'error'` event.

-The return value indicates whether the written `chunk` was buffered internally
-and the buffer has exceeded the `highWaterMark` configured when the stream was
-created. If `false` is returned, further attempts to write data to the stream
-should be paused until the [`'drain'`][] event is emitted.
+The return value is `true` if the internal buffer does not exceed
+`highWaterMark` configured when the stream was created after admitting `chunk`.
+If `false` is returned, further attempts to write data to the stream should
+stop until the [`'drain'`][] event is emitted. However, the `false` return
+value is only advisory and the writable stream will unconditionally accept and
+buffer `chunk` even if it has not been allowed to drain.

 A Writable stream in object mode will always ignore the `encoding` argument.

From 2eec9afdb1588c788c86b9e8e156471197094fad Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C3=ABl=20Zasso?=
Date: Wed, 21 Dec 2016 08:15:56 +0100
Subject: [PATCH 012/144] =?UTF-8?q?doc:=20add=20Micha=C3=ABl=20Zasso=20to?=
 =?UTF-8?q?=20the=20CTC?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Approved by the CTC at https://github.com/nodejs/node/issues/9420

Reviewers are CTC members who voted for this.
Reviewed-By: Rich Trott Reviewed-By: Evan Lucas Reviewed-By: Colin Ihrig Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Julien Gilli Reviewed-By: Myles Borins Reviewed-By: Anna Henningsen Reviewed-By: Fedor Indutny Reviewed-By: Ben Noordhuis Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Rod Vagg Reviewed-By: Shigeki Ohtsu Reviewed-By: James M Snell Reviewed-By: Brian White --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index bdb74b5446ac40..a2abc2ad388562 100644 --- a/README.md +++ b/README.md @@ -184,6 +184,8 @@ more information about the governance of the Node.js project, see **Rod Vagg** <rod@vagg.org> * [shigeki](https://github.com/shigeki) - **Shigeki Ohtsu** <ohtsu@iij.ad.jp> +* [targos](https://github.com/targos) - +**Michaël Zasso** <targos@protonmail.com> * [TheAlphaNerd](https://github.com/TheAlphaNerd) - **Myles Borins** <myles.borins@gmail.com> * [thefourtheye](https://github.com/thefourtheye) - @@ -317,8 +319,6 @@ more information about the governance of the Node.js project, see **Steven R Loomis** <srloomis@us.ibm.com> * [stefanmb](https://github.com/stefanmb) - **Stefan Budeanu** <stefan@budeanu.com> -* [targos](https://github.com/targos) - -**Michaël Zasso** <targos@protonmail.com> * [tellnes](https://github.com/tellnes) - **Christian Tellnes** <christian@tellnes.no> * [thekemkid](https://github.com/thekemkid) - From d1b4c5dc611af1685a7263713a647ec4a16ee6d5 Mon Sep 17 00:00:00 2001 From: Duy Le Date: Wed, 21 Dec 2016 15:01:31 -0800 Subject: [PATCH 013/144] test: refactor test-child-process-kill * var -> const * assert.equal() -> assert.strictEqual() PR-URL: https://github.com/nodejs/node/pull/9903 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Franziska Hinkelmann --- test/parallel/test-child-process-kill.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/parallel/test-child-process-kill.js b/test/parallel/test-child-process-kill.js index 9f6f8f6e796196..758129e1062c27 100644 --- a/test/parallel/test-child-process-kill.js +++ b/test/parallel/test-child-process-kill.js @@ -1,8 +1,8 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var spawn = require('child_process').spawn; -var cat = spawn(common.isWindows ? 'cmd' : 'cat'); +const common = require('../common'); +const assert = require('assert'); +const spawn = require('child_process').spawn; +const cat = spawn(common.isWindows ? 'cmd' : 'cat'); cat.stdout.on('end', common.mustCall(function() {})); cat.stderr.on('data', common.fail); @@ -13,6 +13,6 @@ cat.on('exit', common.mustCall(function(code, signal) { assert.strictEqual(signal, 'SIGTERM'); })); -assert.equal(cat.killed, false); +assert.strictEqual(cat.killed, false); cat.kill(); -assert.equal(cat.killed, true); +assert.strictEqual(cat.killed, true); From a7c9c5685ed0288c18a672dc34fb3b1e2f1395b2 Mon Sep 17 00:00:00 2001 From: Duy Le Date: Wed, 21 Dec 2016 15:07:46 -0800 Subject: [PATCH 014/144] test: add test-require-invalid-package Add test for requiriing an invalid package path. 
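A small stand-alone sketch of what the new test covers (illustrative, not part of the patch): a bare specifier such as `package.json` is resolved as a package name under `node_modules`, not as a file, so it fails with `MODULE_NOT_FOUND`; the relative-path line assumes a `package.json` file sits next to the script.

```js
const assert = require('assert');

// A bare specifier is treated as an (invalid) package path and fails
// with MODULE_NOT_FOUND...
try {
  require('package.json');
} catch (err) {
  assert.strictEqual(err.code, 'MODULE_NOT_FOUND');
}

// ...while an explicit relative path loads the file itself
// (assuming a package.json exists next to this script).
console.log(require('./package.json').name);
```
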
PR-URL: https://github.com/nodejs/node/pull/9903 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Franziska Hinkelmann --- test/parallel/test-require-invalid-package.js | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 test/parallel/test-require-invalid-package.js diff --git a/test/parallel/test-require-invalid-package.js b/test/parallel/test-require-invalid-package.js new file mode 100644 index 00000000000000..45272cf5c3af9a --- /dev/null +++ b/test/parallel/test-require-invalid-package.js @@ -0,0 +1,9 @@ +'use strict'; + +require('../common'); +const assert = require('assert'); + +// Should be an invalid package path. +assert.throws(() => require('package.json'), (err) => { + return err && err.code === 'MODULE_NOT_FOUND'; +}); From fc2fd920abe8c7b7e3f986917ea5f3714cc4cfdf Mon Sep 17 00:00:00 2001 From: Duy Le Date: Wed, 21 Dec 2016 15:09:31 -0800 Subject: [PATCH 015/144] test: refactor test-net-reconnect-error * var -> const/let * assert.equal() -> assert.strictEqual() PR-URL: https://github.com/nodejs/node/pull/9903 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Franziska Hinkelmann --- test/parallel/test-net-reconnect-error.js | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/parallel/test-net-reconnect-error.js b/test/parallel/test-net-reconnect-error.js index 4e322bd09d2914..4e1e3d14984e80 100644 --- a/test/parallel/test-net-reconnect-error.js +++ b/test/parallel/test-net-reconnect-error.js @@ -1,13 +1,13 @@ 'use strict'; -var common = require('../common'); -var net = require('net'); -var assert = require('assert'); -var N = 20; -var client_error_count = 0; -var disconnect_count = 0; +const common = require('../common'); +const net = require('net'); +const assert = require('assert'); +const N = 20; +let client_error_count = 0; +let disconnect_count = 0; // Hopefully nothing is running on common.PORT -var c = net.createConnection(common.PORT); +const c = net.createConnection(common.PORT); c.on('connect', function() { console.error('CLIENT connected'); @@ -17,7 +17,7 @@ c.on('connect', function() { c.on('error', function(e) { console.error('CLIENT error: ' + e.code); client_error_count++; - assert.equal('ECONNREFUSED', e.code); + assert.strictEqual('ECONNREFUSED', e.code); }); c.on('close', function() { @@ -27,6 +27,6 @@ c.on('close', function() { }); process.on('exit', function() { - assert.equal(N + 1, disconnect_count); - assert.equal(N + 1, client_error_count); + assert.strictEqual(N + 1, disconnect_count); + assert.strictEqual(N + 1, client_error_count); }); From 3448e8e5229de09046ddf553befe2839adc0107b Mon Sep 17 00:00:00 2001 From: Neeraj Sharma Date: Thu, 1 Dec 2016 10:49:52 -0600 Subject: [PATCH 016/144] test: use strictEqual in test-cwd-enoent-repl.js In file /test/parallel/test-cwd-enoent-repl.js at line: 26:3 and 27:3 assert.equal was used. This commit changes use of assert.equal to assert.strictEqual. 
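For context on why this series of test patches keeps converting `assert.equal()` to `assert.strictEqual()`, a minimal sketch of the difference (illustrative, not part of the patch): `assert.equal()` compares with `==` and tolerates type coercion, while `assert.strictEqual()` uses `===` and fails loudly on the same inputs.

```js
const assert = require('assert');

// Loose equality coerces types, so mismatches like these pass silently:
assert.equal('42', 42);
assert.equal(1, true);

// Strict equality compares with ===, so the same pairs now fail:
assert.throws(() => assert.strictEqual('42', 42), assert.AssertionError);
assert.throws(() => assert.strictEqual(1, true), assert.AssertionError);
```
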
PR-URL: https://github.com/nodejs/node/pull/9952 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/parallel/test-cwd-enoent-repl.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/parallel/test-cwd-enoent-repl.js b/test/parallel/test-cwd-enoent-repl.js index 2782ec4394ea06..1d03bd3e9112c1 100644 --- a/test/parallel/test-cwd-enoent-repl.js +++ b/test/parallel/test-cwd-enoent-repl.js @@ -23,6 +23,6 @@ proc.stdin.write('require("path");\n'); proc.stdin.write('process.exit(42);\n'); proc.once('exit', common.mustCall(function(exitCode, signalCode) { - assert.equal(exitCode, 42); - assert.equal(signalCode, null); + assert.strictEqual(exitCode, 42); + assert.strictEqual(signalCode, null); })); From a815a236317e781df02ee263c96c00d7c667eb15 Mon Sep 17 00:00:00 2001 From: CodeVana Date: Thu, 1 Dec 2016 08:22:36 -0800 Subject: [PATCH 017/144] test: improve domain-top-level-error-handler-throw Use assert.strictEqual instead of assert.equal. PR-URL: https://github.com/nodejs/node/pull/9950 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott --- test/parallel/test-domain-top-level-error-handler-throw.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/parallel/test-domain-top-level-error-handler-throw.js b/test/parallel/test-domain-top-level-error-handler-throw.js index c98dfc3f1f1b7b..b65b94012393c6 100644 --- a/test/parallel/test-domain-top-level-error-handler-throw.js +++ b/test/parallel/test-domain-top-level-error-handler-throw.js @@ -48,8 +48,8 @@ if (process.argv[2] === 'child') { var expectedExitCode = 7; var expectedSignal = null; - assert.equal(exitCode, expectedExitCode); - assert.equal(signal, expectedSignal); + assert.strictEqual(exitCode, expectedExitCode); + assert.strictEqual(signal, expectedSignal); }); } } From 0684211d12ab2081f34396c7d7cc3ec75283afab Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Tue, 20 Dec 2016 19:00:10 -0500 Subject: [PATCH 018/144] test: refactor the code in test-http-keep-alive * use common.mustCall to control the functions execution automatically * use let and const instead of var * use assert.strictEqual instead assert.equal PR-URL: https://github.com/nodejs/node/pull/10350 Reviewed-By: Colin Ihrig Reviewed-By: Italo A. 
Casas --- test/parallel/test-http-keep-alive.js | 50 +++++++++++++-------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/test/parallel/test-http-keep-alive.js b/test/parallel/test-http-keep-alive.js index d48732e1161886..7f12e45765451d 100644 --- a/test/parallel/test-http-keep-alive.js +++ b/test/parallel/test-http-keep-alive.js @@ -1,51 +1,51 @@ 'use strict'; -require('../common'); -var assert = require('assert'); -var http = require('http'); +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); -var body = 'hello world\n'; +const server = http.createServer(common.mustCall((req, res) => { + const body = 'hello world\n'; -var server = http.createServer(function(req, res) { res.writeHead(200, {'Content-Length': body.length}); res.write(body); res.end(); -}); +}, 3)); -var agent = new http.Agent({maxSockets: 1}); -var headers = {'connection': 'keep-alive'}; -var name; +const agent = new http.Agent({maxSockets: 1}); +const headers = {'connection': 'keep-alive'}; +let name; -server.listen(0, function() { +server.listen(0, common.mustCall(function() { name = agent.getName({ port: this.address().port }); http.get({ path: '/', headers: headers, port: this.address().port, agent: agent - }, function(response) { - assert.equal(agent.sockets[name].length, 1); - assert.equal(agent.requests[name].length, 2); + }, common.mustCall((response) => { + assert.strictEqual(agent.sockets[name].length, 1); + assert.strictEqual(agent.requests[name].length, 2); response.resume(); - }); + })); http.get({ path: '/', headers: headers, port: this.address().port, agent: agent - }, function(response) { - assert.equal(agent.sockets[name].length, 1); - assert.equal(agent.requests[name].length, 1); + }, common.mustCall((response) => { + assert.strictEqual(agent.sockets[name].length, 1); + assert.strictEqual(agent.requests[name].length, 1); response.resume(); - }); + })); http.get({ path: '/', headers: headers, port: this.address().port, agent: agent - }, function(response) { - response.on('end', function() { - assert.equal(agent.sockets[name].length, 1); + }, common.mustCall((response) => { + response.on('end', common.mustCall(() => { + assert.strictEqual(agent.sockets[name].length, 1); assert(!agent.requests.hasOwnProperty(name)); server.close(); - }); + })); response.resume(); - }); -}); + })); +})); -process.on('exit', function() { +process.on('exit', () => { assert(!agent.sockets.hasOwnProperty(name)); assert(!agent.requests.hasOwnProperty(name)); }); From f1cc0a4d2623863291f1b987583e2f29565b2bdc Mon Sep 17 00:00:00 2001 From: Sam Shull Date: Thu, 1 Dec 2016 09:49:28 -0800 Subject: [PATCH 019/144] test: add regex check in test-buffer-bad-overload Creating a buffer from a number should throw an error with a message that describes the issue. 
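A minimal sketch of what the added regex check buys (illustrative, not part of the patch): without a second argument `assert.throws()` is satisfied by any thrown error, whereas a regular expression pins down the specific error expected; the exact message text shown here is taken from the test below and may differ between Node.js versions.

```js
const assert = require('assert');

// Without a validation argument, any thrown error makes this pass:
assert.throws(() => Buffer.from(10, 'hex'));

// With a regular expression, the error's string form is checked as well:
assert.throws(() => Buffer.from(10, 'hex'),
              /^TypeError: "value" argument must not be a number$/);
```
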
PR-URL: https://github.com/nodejs/node/pull/10038 Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/parallel/test-buffer-bad-overload.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-buffer-bad-overload.js b/test/parallel/test-buffer-bad-overload.js index 3d99dd532e9b93..d5626e16d14419 100644 --- a/test/parallel/test-buffer-bad-overload.js +++ b/test/parallel/test-buffer-bad-overload.js @@ -8,7 +8,7 @@ assert.doesNotThrow(function() { assert.throws(function() { Buffer.from(10, 'hex'); -}); +}, /^TypeError: "value" argument must not be a number$/); assert.doesNotThrow(function() { Buffer.from('deadbeaf', 'hex'); From 815b5bdcf4927e94c0efa4bc83db1a42cf1cfc6a Mon Sep 17 00:00:00 2001 From: Ashita Nagesh Date: Thu, 1 Dec 2016 12:06:43 -0500 Subject: [PATCH 020/144] test: change assert.strict to assert.strictEqual() PR-URL: https://github.com/nodejs/node/pull/9988 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- .../test-domain-with-abort-on-uncaught-exception.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-domain-with-abort-on-uncaught-exception.js b/test/parallel/test-domain-with-abort-on-uncaught-exception.js index eae1d28e846ce2..6fb3912e5b8d9c 100644 --- a/test/parallel/test-domain-with-abort-on-uncaught-exception.js +++ b/test/parallel/test-domain-with-abort-on-uncaught-exception.js @@ -125,15 +125,15 @@ if (process.argv[2] === 'child') { } else { // By default, uncaught exceptions make node exit with an exit // code of 7. - assert.equal(exitCode, 7); - assert.equal(signal, null); + assert.strictEqual(exitCode, 7); + assert.strictEqual(signal, null); } } else { // If the top-level domain's error handler does not throw, // the process must exit gracefully, whether or not // --abort_on_uncaught_exception was passed on the command line - assert.equal(exitCode, 0); - assert.equal(signal, null); + assert.strictEqual(exitCode, 0); + assert.strictEqual(signal, null); } }); } From 18a75a085d53696a5c4859508b59632bbc8454ad Mon Sep 17 00:00:00 2001 From: Jason Wohlgemuth Date: Thu, 1 Dec 2016 10:36:16 -0600 Subject: [PATCH 021/144] test: test: refactor test-sync-fileread change equal to strictEqual and var to const PR-URL: https://github.com/nodejs/node/pull/9941 Reviewed-By: Prince John Wesley Reviewed-By: Luigi Pinca Reviewed-By: Rich Trott --- test/parallel/test-sync-fileread.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/parallel/test-sync-fileread.js b/test/parallel/test-sync-fileread.js index 36ac23fd8789a0..6882513c50b950 100644 --- a/test/parallel/test-sync-fileread.js +++ b/test/parallel/test-sync-fileread.js @@ -1,9 +1,9 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var path = require('path'); -var fs = require('fs'); +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); +const fs = require('fs'); -var fixture = path.join(common.fixturesDir, 'x.txt'); +const fixture = path.join(common.fixturesDir, 'x.txt'); -assert.equal('xyz\n', fs.readFileSync(fixture)); +assert.strictEqual(fs.readFileSync(fixture).toString(), 'xyz\n'); From 12a3b189daae019bdc5b5713024b838fcfddc2a5 Mon Sep 17 00:00:00 2001 From: Segu Riluvan Date: Thu, 1 Dec 2016 11:53:59 -0600 Subject: [PATCH 022/144] test: refactor test-cluster-net-listen 10.5 error Please use assert.strictEqual() instead of assert.equal() PR-URL: 
https://github.com/nodejs/node/pull/10047 Reviewed-By: James M Snell --- test/parallel/test-cluster-net-listen.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-cluster-net-listen.js b/test/parallel/test-cluster-net-listen.js index 829d1806d52889..42c13f388f6d30 100644 --- a/test/parallel/test-cluster-net-listen.js +++ b/test/parallel/test-cluster-net-listen.js @@ -7,7 +7,7 @@ var net = require('net'); if (cluster.isMaster) { // ensure that the worker exits peacefully cluster.fork().on('exit', common.mustCall(function(statusCode) { - assert.equal(statusCode, 0); + assert.strictEqual(statusCode, 0); })); } else { // listen() without port should not trigger a libuv assert From a5a738cca74457f0380be26b5884c95b8fdd60eb Mon Sep 17 00:00:00 2001 From: Richard Karmazin Date: Thu, 1 Dec 2016 10:17:17 -0600 Subject: [PATCH 023/144] test: refactor test-tls-0-dns-altname * var -> const, let * assert.equal() -> assert.strictEqual() PR-URL: https://github.com/nodejs/node/pull/9948 Reviewed-By: Teddy Katz Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: James M Snell --- test/parallel/test-tls-0-dns-altname.js | 26 ++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/test/parallel/test-tls-0-dns-altname.js b/test/parallel/test-tls-0-dns-altname.js index 4ae9f2c91b00ad..874dc6b235d644 100644 --- a/test/parallel/test-tls-0-dns-altname.js +++ b/test/parallel/test-tls-0-dns-altname.js @@ -1,16 +1,16 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); +const common = require('../common'); +const assert = require('assert'); if (!common.hasCrypto) { common.skip('missing crypto'); return; } -var tls = require('tls'); +const tls = require('tls'); -var fs = require('fs'); +const fs = require('fs'); -var server = tls.createServer({ +const server = tls.createServer({ key: fs.readFileSync(common.fixturesDir + '/keys/0-dns-key.pem'), cert: fs.readFileSync(common.fixturesDir + '/keys/0-dns-cert.pem') }, function(c) { @@ -19,16 +19,16 @@ var server = tls.createServer({ server.close(); }); }).listen(0, common.mustCall(function() { - var c = tls.connect(this.address().port, { + const c = tls.connect(this.address().port, { rejectUnauthorized: false }, common.mustCall(function() { - var cert = c.getPeerCertificate(); - assert.equal(cert.subjectaltname, - 'DNS:google.com\0.evil.com, ' + - 'DNS:just-another.com, ' + - 'IP Address:8.8.8.8, ' + - 'IP Address:8.8.4.4, ' + - 'DNS:last.com'); + const cert = c.getPeerCertificate(); + assert.strictEqual(cert.subjectaltname, + 'DNS:google.com\0.evil.com, ' + + 'DNS:just-another.com, ' + + 'IP Address:8.8.8.8, ' + + 'IP Address:8.8.4.4, ' + + 'DNS:last.com'); c.write('ok'); })); })); From 9044423bb6a35ef0c1f38f1ee7f52da4dabee755 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Wed, 14 Dec 2016 23:46:08 +0200 Subject: [PATCH 024/144] doc: modernize code examples in the cluster.md - Fixes https://github.com/nodejs/node/issues/10255 - It also will be consistent with a previous code example. 
- `cluster.workers` iteration: `Object.keys().forEach` -> `for`...`in` PR-URL: https://github.com/nodejs/node/pull/10270 Reviewed-By: Luigi Pinca Reviewed-By: Sam Roberts --- doc/api/cluster.md | 45 +++++++++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/doc/api/cluster.md b/doc/api/cluster.md index f3c8017da1eafd..01a2aaf6a8cc86 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -15,8 +15,10 @@ const http = require('http'); const numCPUs = require('os').cpus().length; if (cluster.isMaster) { + console.log(`Master ${process.pid} is running`); + // Fork workers. - for (var i = 0; i < numCPUs; i++) { + for (let i = 0; i < numCPUs; i++) { cluster.fork(); } @@ -30,17 +32,20 @@ if (cluster.isMaster) { res.writeHead(200); res.end('hello world\n'); }).listen(8000); + + console.log(`Worker ${process.pid} started`); } ``` Running Node.js will now share port 8000 between the workers: ```txt -$ NODE_DEBUG=cluster node server.js -23521,Master Worker 23524 online -23521,Master Worker 23526 online -23521,Master Worker 23523 online -23521,Master Worker 23528 online +$ node server.js +Master 3596 is running +Worker 4324 started +Worker 4520 started +Worker 6056 started +Worker 5644 started ``` Please note that on Windows, it is not yet possible to set up a named pipe @@ -202,27 +207,27 @@ const http = require('http'); if (cluster.isMaster) { // Keep track of http requests - var numReqs = 0; + let numReqs = 0; setInterval(() => { - console.log('numReqs =', numReqs); + console.log(`numReqs = ${numReqs}`); }, 1000); // Count requests function messageHandler(msg) { - if (msg.cmd && msg.cmd == 'notifyRequest') { + if (msg.cmd && msg.cmd === 'notifyRequest') { numReqs += 1; } } // Start workers and listen for messages containing notifyRequest const numCPUs = require('os').cpus().length; - for (var i = 0; i < numCPUs; i++) { + for (let i = 0; i < numCPUs; i++) { cluster.fork(); } - Object.keys(cluster.workers).forEach((id) => { + for (const id in cluster.workers) { cluster.workers[id].on('message', messageHandler); - }); + } } else { @@ -287,8 +292,8 @@ the `'disconnect'` event has not been emitted after some time. ```js if (cluster.isMaster) { - var worker = cluster.fork(); - var timeout; + const worker = cluster.fork(); + let timeout; worker.on('listening', (address) => { worker.send('shutdown'); @@ -304,7 +309,7 @@ if (cluster.isMaster) { } else if (cluster.isWorker) { const net = require('net'); - var server = net.createServer((socket) => { + const server = net.createServer((socket) => { // connections never end }); @@ -430,7 +435,7 @@ This example will echo back all messages from the master: ```js if (cluster.isMaster) { - var worker = cluster.fork(); + const worker = cluster.fork(); worker.send('hi there'); } else if (cluster.isWorker) { @@ -526,7 +531,7 @@ When a new worker is forked the cluster module will emit a `'fork'` event. This can be used to log worker activity, and create your own timeout. 
```js -var timeouts = []; +const timeouts = []; function errorMsg() { console.error('Something must be wrong with the connection ...'); } @@ -590,7 +595,7 @@ If you need to support older versions and don't need the worker object, you can work around the discrepancy by checking the number of arguments: ```js -cluster.on('message', function(worker, message, handle) { +cluster.on('message', (worker, message, handle) => { if (arguments.length === 2) { handle = message; message = worker; @@ -809,7 +814,7 @@ before last `'disconnect'` or `'exit'` event is emitted. ```js // Go through all workers function eachWorker(callback) { - for (var id in cluster.workers) { + for (const id in cluster.workers) { callback(cluster.workers[id]); } } @@ -823,7 +828,7 @@ the worker's unique id is the easiest way to find the worker. ```js socket.on('data', (id) => { - var worker = cluster.workers[id]; + const worker = cluster.workers[id]; }); ``` From b67879f6f40e6e6c50f942a2e7c56c2c5898dfa1 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Sat, 10 Dec 2016 03:11:19 +0800 Subject: [PATCH 025/144] doc: clarify the review and landing process Adds/mentions: - Link to glossary - Commit squashing and CI run - 48/72 hour wait and PR review feature - Extra notes section - "Landed in " comment PR-URL: https://github.com/nodejs/node/pull/10202 Ref: https://github.com/nodejs/node/pull/10151 Reviewed-By: Anna Henningsen Reviewed-By: Evan Lucas Reviewed-By: Gibson Fahnestock --- CONTRIBUTING.md | 83 ++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 75 insertions(+), 8 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 830f2615528170..b320dc21a3ed5b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -243,18 +243,85 @@ If in doubt, you can always ask for guidance in the Pull Request or on [IRC in the #node-dev channel](https://webchat.freenode.net?channels=node-dev&uio=d4). Feel free to post a comment in the Pull Request to ping reviewers if you are -awaiting an answer on something. +awaiting an answer on something. If you encounter words or acronyms that +seem unfamiliar, check out this +[glossary](https://sites.google.com/a/chromium.org/dev/glossary). +Note that multiple commits often get squashed when they are landed (see the +notes about [commit squashing](#commit-squashing)). ### Step 8: Landing -Once your Pull Request has been reviewed and approved by at least one Node.js -Collaborators (often by saying LGTM, or Looks Good To Me), and as long as -there is consensus (no objections from a Collaborator), a -Collaborator can merge the Pull Request . GitHub often shows the Pull Request as - `Closed` at this point, but don't worry. If you look at the branch you raised - your Pull Request against (probably `master`), you should see a commit with - your name on it. Congratulations and thanks for your contribution! +In order to get landed, a Pull Request needs to be reviewed and +[approved](#getting-approvals-for-your-pull-request) by +at least one Node.js Collaborator and pass a +[CI (Continuous Integration) test run](#ci-testing). +After that, as long as there are no objections +from a Collaborator, the Pull Request can be merged. If you find your +Pull Request waiting longer than you expect, see the +[notes about the waiting time](#waiting-until-the-pull-request-gets-landed). + +When a collaborator lands your Pull Request, they will post +a comment to the Pull Request page mentioning the commit(s) it +landed as. GitHub often shows the Pull Request as `Closed` at this +point, but don't worry. 
If you look at the branch you raised your +Pull Request against (probably `master`), you should see a commit with +your name on it. Congratulations and thanks for your contribution! + +## Additional Notes + +### Commit Squashing + +When the commits in your Pull Request get landed, they will be squashed +into one commit per logical change, with metadata added to the commit +message (including links to the Pull Request, links to relevant issues, +and the names of the reviewers). The commit history of your Pull Request, +however, will stay intact on the Pull Request page. + +For the size of "one logical change", +[0b5191f](https://github.com/nodejs/node/commit/0b5191f15d0f311c804d542b67e2e922d98834f8) +can be a good example. It touches the implementation, the documentation, +and the tests, but is still one logical change. In general, the tests should +always pass when each individual commit lands on the master branch. + +### Getting Approvals for Your Pull Request + +A Pull Request is approved either by saying LGTM, which stands for +"Looks Good To Me", or by using GitHub's Approve button. +GitHub's Pull Request review feature can be used during the process. +For more information, check out +[the video tutorial](https://www.youtube.com/watch?v=HW0RPaJqm4g) +or [the official documentation](https://help.github.com/articles/reviewing-changes-in-pull-requests/). + +After you push new changes to your branch, you need to get +approval for these new changes again, even if GitHub shows "Approved" +because the reviewers have hit the buttons before. + +### CI Testing + +Every Pull Request needs to be tested +to make sure that it works on the platforms that Node.js +supports. This is done by running the code through the CI system. + +Only a Collaborator can request a CI run. Usually one of them will do it +for you as approvals for the Pull Request come in. +If not, you can ask a Collaborator to request a CI run. + +### Waiting Until the Pull Request Gets Landed + +A Pull Request needs to stay open for at least 48 hours (72 hours on a +weekend) from when it is submitted, even after it gets approved and +passes the CI. This is to make sure that everyone has a chance to +weigh in. If the changes are trivial, collaborators may decide it +doesn't need to wait. A Pull Request may well take longer to be +merged in. All these precautions are important because Node.js is +widely used, so don't be discouraged! + +### Check Out the Collaborator's Guide + +If you want to know more about the code review and the landing process, +you can take a look at the +[collaborator's guide](https://github.com/nodejs/node/blob/master/COLLABORATOR_GUIDE.md). 
## Developer's Certificate of Origin 1.1 From ba633635124c5ad139931a79d270518ab0cf5460 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 19 Dec 2016 21:40:40 -0800 Subject: [PATCH 026/144] test: refactor test-stream2-writable * add duration to setTimeout() * assert.equal() -> assert.strictEqual() * remove unused function arguments * normalize indentation PR-URL: https://github.com/nodejs/node/pull/10353 Reviewed-By: Julian Duque Reviewed-By: Matteo Collina --- test/parallel/test-stream2-writable.js | 64 +++++++++++++------------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js index 050c2121b2289a..5ecdd0f76ecb5a 100644 --- a/test/parallel/test-stream2-writable.js +++ b/test/parallel/test-stream2-writable.js @@ -46,7 +46,7 @@ function run() { console.log('# %s', name); fn({ same: assert.deepStrictEqual, - equal: assert.equal, + equal: assert.strictEqual, end: function() { count--; run(); @@ -56,7 +56,7 @@ function run() { // ensure all tests have run process.on('exit', function() { - assert.equal(count, 0); + assert.strictEqual(count, 0); }); process.nextTick(run); @@ -136,18 +136,18 @@ test('write bufferize', function(t) { }); var encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'latin1', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined ]; tw.on('finish', function() { t.same(tw.buffer, chunks, 'got the expected chunks'); @@ -174,18 +174,18 @@ test('write no bufferize', function(t) { }; var encodings = - [ 'hex', - 'utf8', - 'utf-8', - 'ascii', - 'latin1', - 'binary', - 'base64', - 'ucs2', - 'ucs-2', - 'utf16le', - 'utf-16le', - undefined ]; + [ 'hex', + 'utf8', + 'utf-8', + 'ascii', + 'latin1', + 'binary', + 'base64', + 'ucs2', + 'ucs-2', + 'utf16le', + 'utf-16le', + undefined ]; tw.on('finish', function() { t.same(tw.buffer, chunks, 'got the expected chunks'); @@ -201,7 +201,7 @@ test('write no bufferize', function(t) { test('write callbacks', function(t) { var callbacks = chunks.map(function(chunk, i) { - return [i, function(er) { + return [i, function() { callbacks._called[i] = chunk; }]; }).reduce(function(set, x) { @@ -272,7 +272,7 @@ test('end callback called after write callback', function(t) { test('encoding should be ignored for buffers', function(t) { var tw = new W(); var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb'; - tw._write = function(chunk, encoding, cb) { + tw._write = function(chunk) { t.equal(chunk.toString('hex'), hex); t.end(); }; @@ -284,7 +284,7 @@ test('writables are not pipable', function(t) { var w = new W(); w._write = function() {}; var gotError = false; - w.on('error', function(er) { + w.on('error', function() { gotError = true; }); w.pipe(process.stdout); @@ -297,7 +297,7 @@ test('duplexes are pipable', function(t) { d._read = function() {}; d._write = function() {}; var gotError = false; - d.on('error', function(er) { + d.on('error', function() { gotError = true; }); d.pipe(process.stdout); @@ -331,7 +331,7 @@ test('dont end while writing', function(t) { setTimeout(function() { this.writing = false; cb(); - }); + }, 1); }; w.on('finish', function() { assert(wrote); @@ -368,7 +368,7 @@ test('finish does not come before sync _write cb', function(t) { assert(writeCb); t.end(); }); - w.write(Buffer.alloc(0), function(er) { + w.write(Buffer.alloc(0), function() { writeCb = true; }); 
w.end(); From 501165f0c1b992033c426313124644cc99e67643 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 20 Dec 2016 13:38:22 -0800 Subject: [PATCH 027/144] test: fix timers-same-timeout-wrong-list-deleted test-timers-same-timeout-wrong-list-deleted was flaky under load because there is no guarantee that a timer will fire within a given period of time. It had an exit handler that checked that the process was finishing in less than twice as much as a timer was set for. Under load, the timer could take over 200ms to fire even if it was set for 100ms, so this was causing the test to be flaky on CI from time to time. However, that timing check is unnecessary to identify the regression that the test was written for. When run with a version of Node.js that does not contain the fix that accompanied the test in its initial commit, an assertion indicating that there were still timers in the active timer list fired. So, this commit removes the exit handler timing check and relies on the existing robust active timers list length check. This allows us to move the test back to parallel because it does not seem to fail under load anymore. The test was refactored slightly, removing duplicated code to a function, using `assert.strictEqual()` instead of `assert.equal()`, changing a 10ms timer to 1ms, and improving the messages provided by assertions. Fixes: https://github.com/nodejs/node/issues/8459 PR-URL: https://github.com/nodejs/node/pull/10362 Reviewed-By: Jeremiah Senkpiel Reviewed-By: James M Snell --- ...-timers-same-timeout-wrong-list-deleted.js | 40 ++++++++----------- 1 file changed, 16 insertions(+), 24 deletions(-) rename test/{sequential => parallel}/test-timers-same-timeout-wrong-list-deleted.js (68%) diff --git a/test/sequential/test-timers-same-timeout-wrong-list-deleted.js b/test/parallel/test-timers-same-timeout-wrong-list-deleted.js similarity index 68% rename from test/sequential/test-timers-same-timeout-wrong-list-deleted.js rename to test/parallel/test-timers-same-timeout-wrong-list-deleted.js index 05c0233e124b83..8a622b32e434af 100644 --- a/test/sequential/test-timers-same-timeout-wrong-list-deleted.js +++ b/test/parallel/test-timers-same-timeout-wrong-list-deleted.js @@ -16,16 +16,6 @@ const assert = require('assert'); const Timer = process.binding('timer_wrap').Timer; const TIMEOUT = common.platformTimeout(100); -const start = Timer.now(); - -// This bug also prevents the erroneously dereferenced timer's callback -// from being called, so we can't use it's execution or lack thereof -// to assert that the bug is fixed. -process.on('exit', function() { - const end = Timer.now(); - assert.equal(end - start < TIMEOUT * 2, true, - 'Elapsed time does not include second timer\'s timeout.'); -}); const handle1 = setTimeout(common.mustCall(function() { // Cause the old TIMEOUT list to be deleted @@ -42,27 +32,22 @@ const handle1 = setTimeout(common.mustCall(function() { // erroneously deleted. If we are able to cancel the timer successfully, // the bug is fixed. clearTimeout(handle2); + setImmediate(common.mustCall(function() { setImmediate(common.mustCall(function() { - const activeHandles = process._getActiveHandles(); - const activeTimers = activeHandles.filter(function(handle) { - return handle instanceof Timer; - }); + const activeTimers = getActiveTimers(); // Make sure our clearTimeout succeeded. One timer finished and // the other was canceled, so none should be active. 
- assert.equal(activeTimers.length, 0, 'No Timers remain.'); + assert.strictEqual(activeTimers.length, 0, 'Timers remain.'); })); })); - }), 10); + }), 1); // Make sure our timers got added to the list. - const activeHandles = process._getActiveHandles(); - const activeTimers = activeHandles.filter(function(handle) { - return handle instanceof Timer; - }); + const activeTimers = getActiveTimers(); const shortTimer = activeTimers.find(function(handle) { - return handle._list.msecs === 10; + return handle._list.msecs === 1; }); const longTimers = activeTimers.filter(function(handle) { return handle._list.msecs === TIMEOUT; @@ -70,11 +55,18 @@ const handle1 = setTimeout(common.mustCall(function() { // Make sure our clearTimeout succeeded. One timer finished and // the other was canceled, so none should be active. - assert.equal(activeTimers.length, 3, 'There are 3 timers in the list.'); - assert(shortTimer instanceof Timer, 'The shorter timer is in the list.'); - assert.equal(longTimers.length, 2, 'Both longer timers are in the list.'); + assert.strictEqual(activeTimers.length, 3, + 'There should be 3 timers in the list.'); + assert(shortTimer instanceof Timer, 'The shorter timer is not in the list.'); + assert.strictEqual(longTimers.length, 2, + 'Both longer timers should be in the list.'); // When this callback completes, `listOnTimeout` should now look at the // correct list and refrain from removing the new TIMEOUT list which // contains the reference to the newer timer. }), TIMEOUT); + +function getActiveTimers() { + const activeHandles = process._getActiveHandles(); + return activeHandles.filter((handle) => handle instanceof Timer); +} From 0c4cf24f70f8c966747e8414fda21275fdc2c077 Mon Sep 17 00:00:00 2001 From: Kevin Zurawel Date: Thu, 1 Dec 2016 10:12:46 -0600 Subject: [PATCH 028/144] doc: update process.versions.modules documentation This commit adds a description of `process.versions.modules`, based on the comment in `src/node_version.h` lines 47-50. PR-URL: https://github.com/nodejs/node/pull/9901 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen --- doc/api/process.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/api/process.md b/doc/api/process.md index 23759a5674cef6..3e0414e59effc4 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -1651,7 +1651,9 @@ added: v0.2.0 * {Object} The `process.versions` property returns an object listing the version strings of -Node.js and its dependencies. +Node.js and its dependencies. In addition, `process.versions.modules` indicates +the current ABI version, which is increased whenever a C++ API changes. Node.js +will refuse to load native modules built for an older `modules` value. ```js console.log(process.versions); From 7c406e819bdf709fcd15fff51d3a314332f923fc Mon Sep 17 00:00:00 2001 From: Emanuel Buholzer Date: Sun, 18 Dec 2016 03:04:37 +0100 Subject: [PATCH 029/144] test: refactor test-stdin-script-child - var -> const where possible - assert.equal -> assert.strictEqual - passed the setTimeout function a second parameter for readability - used assert.strictEqual for assert(!c) as it is expected to be 0 and not some other value PR-URL: https://github.com/nodejs/node/pull/10321 Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott Reviewed-By: Italo A. 
Casas --- test/parallel/test-stdin-script-child.js | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/parallel/test-stdin-script-child.js b/test/parallel/test-stdin-script-child.js index ad65734364e707..091c5cb29ad296 100644 --- a/test/parallel/test-stdin-script-child.js +++ b/test/parallel/test-stdin-script-child.js @@ -1,14 +1,14 @@ 'use strict'; -require('../common'); -var assert = require('assert'); +const common = require('../common'); +const assert = require('assert'); -var spawn = require('child_process').spawn; -var child = spawn(process.execPath, [], { +const spawn = require('child_process').spawn; +const child = spawn(process.execPath, [], { env: Object.assign(process.env, { NODE_DEBUG: process.argv[2] }) }); -var wanted = child.pid + '\n'; +const wanted = child.pid + '\n'; var found = ''; child.stdout.setEncoding('utf8'); @@ -21,12 +21,12 @@ child.stderr.on('data', function(c) { console.error('> ' + c.trim().split(/\n/).join('\n> ')); }); -child.on('close', function(c) { - assert(!c); - assert.equal(found, wanted); +child.on('close', common.mustCall(function(c) { + assert.strictEqual(c, 0); + assert.strictEqual(found, wanted); console.log('ok'); -}); +})); setTimeout(function() { child.stdin.end('console.log(process.pid)'); -}); +}, 1); From 274eef4da03dac7b106fe4c1dc4dc53aaf7b02a1 Mon Sep 17 00:00:00 2001 From: Travis Bretton Date: Thu, 1 Dec 2016 10:45:24 -0700 Subject: [PATCH 030/144] test: refactoring test-pipe-head - Updated assert.equal to assert.strictEqual - Updated 'var' to 'const' - Using template literals PR-URL: https://github.com/nodejs/node/pull/10036 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Italo A. Casas --- test/parallel/test-pipe-head.js | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/parallel/test-pipe-head.js b/test/parallel/test-pipe-head.js index dcb4e89137f536..c838be03aa9a01 100644 --- a/test/parallel/test-pipe-head.js +++ b/test/parallel/test-pipe-head.js @@ -1,17 +1,17 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); +const common = require('../common'); +const assert = require('assert'); -var exec = require('child_process').exec; -var join = require('path').join; +const exec = require('child_process').exec; +const join = require('path').join; -var nodePath = process.argv[0]; -var script = join(common.fixturesDir, 'print-10-lines.js'); +const nodePath = process.argv[0]; +const script = join(common.fixturesDir, 'print-10-lines.js'); -var cmd = '"' + nodePath + '" "' + script + '" | head -2'; +const cmd = `"${nodePath}" "${script}" | head -2`; exec(cmd, common.mustCall(function(err, stdout, stderr) { - if (err) throw err; - var lines = stdout.split('\n'); - assert.equal(3, lines.length); + assert.ifError(err); + const lines = stdout.split('\n'); + assert.strictEqual(3, lines.length); })); From 14826d0569c6c1d0e56acd09a097b95db43eb222 Mon Sep 17 00:00:00 2001 From: Christopher Rokita Date: Thu, 1 Dec 2016 11:41:17 -0500 Subject: [PATCH 031/144] test: refactoring test-cluster-worker-constructor - Using assert.strictEqual instead assert.equal PR-URL: https://github.com/nodejs/node/pull/9956 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Italo A. 
Casas --- .../test-cluster-worker-constructor.js | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/parallel/test-cluster-worker-constructor.js b/test/parallel/test-cluster-worker-constructor.js index 2a96d24a8a3459..f3ae7db73168a9 100644 --- a/test/parallel/test-cluster-worker-constructor.js +++ b/test/parallel/test-cluster-worker-constructor.js @@ -8,21 +8,21 @@ var cluster = require('cluster'); var worker; worker = new cluster.Worker(); -assert.equal(worker.suicide, undefined); -assert.equal(worker.state, 'none'); -assert.equal(worker.id, 0); -assert.equal(worker.process, undefined); +assert.strictEqual(worker.suicide, undefined); +assert.strictEqual(worker.state, 'none'); +assert.strictEqual(worker.id, 0); +assert.strictEqual(worker.process, undefined); worker = new cluster.Worker({ id: 3, state: 'online', process: process }); -assert.equal(worker.suicide, undefined); -assert.equal(worker.state, 'online'); -assert.equal(worker.id, 3); -assert.equal(worker.process, process); +assert.strictEqual(worker.suicide, undefined); +assert.strictEqual(worker.state, 'online'); +assert.strictEqual(worker.id, 3); +assert.strictEqual(worker.process, process); worker = cluster.Worker.call({}, {id: 5}); assert(worker instanceof cluster.Worker); -assert.equal(worker.id, 5); +assert.strictEqual(worker.id, 5); From a2ec794d3b5709e33b0945a9f2a331953e4dffe3 Mon Sep 17 00:00:00 2001 From: "Avery, Frank" Date: Thu, 1 Dec 2016 10:22:35 -0600 Subject: [PATCH 032/144] test: added validation regex argument to test In this change, I've added the regex pattern to the assert.throws() in order to provide the validation argument for the call. PR-URL: https://github.com/nodejs/node/pull/9918 Reviewed-By: Teddy Katz Reviewed-By: Colin Ihrig --- test/parallel/test-file-write-stream.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/parallel/test-file-write-stream.js b/test/parallel/test-file-write-stream.js index 3c7cf8f341dce2..3b6883444249e6 100644 --- a/test/parallel/test-file-write-stream.js +++ b/test/parallel/test-file-write-stream.js @@ -46,7 +46,7 @@ file assert.throws(function() { console.error('write after end should not be allowed'); file.write('should not work anymore'); - }); + }, /^Error: write after end$/); fs.unlinkSync(fn); }); From ba46374cb97337918d358dae3434d7b8e2f2dbc8 Mon Sep 17 00:00:00 2001 From: Bartosz Sosnowski Date: Mon, 12 Dec 2016 12:02:10 +0100 Subject: [PATCH 033/144] watchdog: add flag to mark handler as disabled Adds flags that marks WinCtrlCHandlerRoutine as disabled instead of removing it. Trying to remove the controller from the controller handle itself leads to deadlock. PR-URL: https://github.com/nodejs/node/pull/10248 Reviewed-By: Anna Henningsen --- src/node_watchdog.cc | 13 ++++++++++--- src/node_watchdog.h | 1 + 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/node_watchdog.cc b/src/node_watchdog.cc index 01b39123be5e7c..5d95c4132f1b3f 100644 --- a/src/node_watchdog.cc +++ b/src/node_watchdog.cc @@ -150,7 +150,8 @@ void SigintWatchdogHelper::HandleSignal(int signum) { // Windows starts a separate thread for executing the handler, so no extra // helper thread is required. BOOL WINAPI SigintWatchdogHelper::WinCtrlCHandlerRoutine(DWORD dwCtrlType) { - if (dwCtrlType == CTRL_C_EVENT || dwCtrlType == CTRL_BREAK_EVENT) { + if (!instance.watchdog_disabled_ && + (dwCtrlType == CTRL_C_EVENT || dwCtrlType == CTRL_BREAK_EVENT)) { InformWatchdogsAboutSignal(); // Return true because the signal has been handled. 
@@ -207,7 +208,11 @@ int SigintWatchdogHelper::Start() { RegisterSignalHandler(SIGINT, HandleSignal); #else - SetConsoleCtrlHandler(WinCtrlCHandlerRoutine, TRUE); + if (watchdog_disabled_) { + watchdog_disabled_ = false; + } else { + SetConsoleCtrlHandler(WinCtrlCHandlerRoutine, TRUE); + } #endif return 0; @@ -251,7 +256,7 @@ bool SigintWatchdogHelper::Stop() { RegisterSignalHandler(SIGINT, SignalExit, true); #else - SetConsoleCtrlHandler(WinCtrlCHandlerRoutine, FALSE); + watchdog_disabled_ = true; #endif had_pending_signal = has_pending_signal_; @@ -292,6 +297,8 @@ SigintWatchdogHelper::SigintWatchdogHelper() has_running_thread_ = false; stopping_ = false; CHECK_EQ(0, uv_sem_init(&sem_, 0)); +#else + watchdog_disabled_ = false; #endif } diff --git a/src/node_watchdog.h b/src/node_watchdog.h index dd97e4e735ccdf..2d55d782d0af5d 100644 --- a/src/node_watchdog.h +++ b/src/node_watchdog.h @@ -91,6 +91,7 @@ class SigintWatchdogHelper { static void* RunSigintWatchdog(void* arg); static void HandleSignal(int signum); #else + bool watchdog_disabled_; static BOOL WINAPI WinCtrlCHandlerRoutine(DWORD dwCtrlType); #endif }; From bee7d7e32c27d106a2a54cd6d36f6009219ae3a8 Mon Sep 17 00:00:00 2001 From: Daniel Sims Date: Thu, 1 Dec 2016 10:54:07 -0600 Subject: [PATCH 034/144] test: change var declarations, add mustCall check In this test, I changed the var declarations to be either a let or a const. For some of the callbacks, I added a mustCall check to ensure that the functions have run. I also changed assert.equal() to assert.strictEqual(). PR-URL: https://github.com/nodejs/node/pull/9962 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- test/parallel/test-cluster-net-send.js | 33 +++++++++++++------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/test/parallel/test-cluster-net-send.js b/test/parallel/test-cluster-net-send.js index fe536b5f2a43c3..d375920e91b18b 100644 --- a/test/parallel/test-cluster-net-send.js +++ b/test/parallel/test-cluster-net-send.js @@ -1,29 +1,29 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var fork = require('child_process').fork; -var net = require('net'); +const common = require('../common'); +const assert = require('assert'); +const fork = require('child_process').fork; +const net = require('net'); if (process.argv[2] !== 'child') { console.error('[%d] master', process.pid); - var worker = fork(__filename, ['child']); - var called = false; + const worker = fork(__filename, ['child']); + let called = false; - worker.once('message', function(msg, handle) { - assert.equal(msg, 'handle'); + worker.once('message', common.mustCall(function(msg, handle) { + assert.strictEqual(msg, 'handle'); assert.ok(handle); worker.send('got'); handle.on('data', function(data) { called = true; - assert.equal(data.toString(), 'hello'); + assert.strictEqual(data.toString(), 'hello'); }); handle.on('end', function() { worker.kill(); }); - }); + })); process.once('exit', function() { assert.ok(called); @@ -31,20 +31,21 @@ if (process.argv[2] !== 'child') { } else { console.error('[%d] worker', process.pid); - var socket; - var cbcalls = 0; + let socket; + let cbcalls = 0; function socketConnected() { if (++cbcalls === 2) process.send('handle', socket); } - var server = net.createServer(function(c) { - process.once('message', function(msg) { - assert.equal(msg, 'got'); + const server = net.createServer(function(c) { + process.once('message', common.mustCall(function(msg) { + assert.strictEqual(msg, 'got'); c.end('hello'); - }); + 
})); socketConnected(); }); + server.listen(common.PORT, function() { socket = net.connect(common.PORT, '127.0.0.1', socketConnected); }); From 05332942e2565fd557cae6b8bc813019494d4f5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Dec 2016 11:08:56 +0100 Subject: [PATCH 035/144] tools: enforce linebreak after ternary operators This is to be consistent with the other operators and helps understanding the context when the code is grepped. PR-URL: https://github.com/nodejs/node/pull/10213 Reviewed-By: Ben Noordhuis Reviewed-By: Colin Ihrig Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Teddy Katz Reviewed-By: James M Snell --- .eslintrc | 2 +- benchmark/_http-benchmarkers.js | 6 +++--- lib/_stream_readable.js | 6 +++--- lib/repl.js | 9 ++++----- lib/url.js | 6 +++--- test/parallel/test-buffer-alloc.js | 6 +++--- test/parallel/test-buffer-concat.js | 6 +++--- test/parallel/test-https-strict.js | 8 ++++---- .../test-promises-unhandled-rejections.js | 6 +++--- test/parallel/test-punycode.js | 18 +++++++++--------- test/parallel/test-readline-interface.js | 6 +++--- .../no-useless-regex-char-class-escape.js | 6 +++--- 12 files changed, 42 insertions(+), 43 deletions(-) diff --git a/.eslintrc b/.eslintrc index 182e783206789b..0aed06ca55f5c3 100644 --- a/.eslintrc +++ b/.eslintrc @@ -94,7 +94,7 @@ rules: no-multiple-empty-lines: [2, {max: 2, maxEOF: 0, maxBOF: 0}] no-tabs: 2 no-trailing-spaces: 2 - operator-linebreak: [2, after, {overrides: {'?': ignore, ':': ignore}}] + operator-linebreak: [2, after] quotes: [2, single, avoid-escape] semi: 2 semi-spacing: 2 diff --git a/benchmark/_http-benchmarkers.js b/benchmark/_http-benchmarkers.js index 5429bf386d85ac..e096efc71ba162 100644 --- a/benchmark/_http-benchmarkers.js +++ b/benchmark/_http-benchmarkers.js @@ -7,9 +7,9 @@ exports.PORT = process.env.PORT || 12346; function AutocannonBenchmarker() { this.name = 'autocannon'; - this.autocannon_exe = process.platform === 'win32' - ? 'autocannon.cmd' - : 'autocannon'; + this.autocannon_exe = process.platform === 'win32' ? + 'autocannon.cmd' : + 'autocannon'; const result = child_process.spawnSync(this.autocannon_exe, ['-h']); this.present = !(result.error && result.error.code === 'ENOENT'); } diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js index 0844604d960258..d6af8589f3bc85 100644 --- a/lib/_stream_readable.js +++ b/lib/_stream_readable.js @@ -876,9 +876,9 @@ function fromListPartial(n, list, hasStrings) { ret = list.shift(); } else { // result spans more than one buffer - ret = (hasStrings - ? copyFromBufferString(n, list) - : copyFromBuffer(n, list)); + ret = (hasStrings ? + copyFromBufferString(n, list) : + copyFromBuffer(n, list)); } return ret; } diff --git a/lib/repl.js b/lib/repl.js index 2bb25e8ca3391b..d5060a37ab5ed1 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -433,13 +433,12 @@ function REPLServer(prompt, self.lines.level = []; // Figure out which "complete" function to use. - self.completer = (typeof options.completer === 'function') - ? options.completer - : completer; + self.completer = (typeof options.completer === 'function') ? + options.completer : completer; function completer(text, cb) { - complete.call(self, text, self.editorMode - ? self.completeOnEditorMode(cb) : cb); + complete.call(self, text, self.editorMode ? 
+ self.completeOnEditorMode(cb) : cb); } Interface.call(this, { diff --git a/lib/url.js b/lib/url.js index 75f4967ad2b6d3..0cc364686cc28e 100644 --- a/lib/url.js +++ b/lib/url.js @@ -381,9 +381,9 @@ Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { } var firstIdx = (questionIdx !== -1 && - (hashIdx === -1 || questionIdx < hashIdx) - ? questionIdx - : hashIdx); + (hashIdx === -1 || questionIdx < hashIdx) ? + questionIdx : + hashIdx); if (firstIdx === -1) { if (rest.length > 0) this.pathname = rest; diff --git a/test/parallel/test-buffer-alloc.js b/test/parallel/test-buffer-alloc.js index 4bc18dd1ab1cf7..2245c9843a5edd 100644 --- a/test/parallel/test-buffer-alloc.js +++ b/test/parallel/test-buffer-alloc.js @@ -775,9 +775,9 @@ Buffer.from(Buffer.allocUnsafe(0), 0, 0); assert.strictEqual(string, '{"type":"Buffer","data":[116,101,115,116]}'); assert.deepStrictEqual(buffer, JSON.parse(string, (key, value) => { - return value && value.type === 'Buffer' - ? Buffer.from(value.data) - : value; + return value && value.type === 'Buffer' ? + Buffer.from(value.data) : + value; })); } diff --git a/test/parallel/test-buffer-concat.js b/test/parallel/test-buffer-concat.js index 800f1055aa6222..b29e00682c114b 100644 --- a/test/parallel/test-buffer-concat.js +++ b/test/parallel/test-buffer-concat.js @@ -39,9 +39,9 @@ function assertWrongList(value) { }); } -const random10 = common.hasCrypto - ? require('crypto').randomBytes(10) - : Buffer.alloc(10, 1); +const random10 = common.hasCrypto ? + require('crypto').randomBytes(10) : + Buffer.alloc(10, 1); const empty = Buffer.alloc(0); assert.notDeepStrictEqual(random10, empty); diff --git a/test/parallel/test-https-strict.js b/test/parallel/test-https-strict.js index e62f8ad01549e5..871d9ef94e6960 100644 --- a/test/parallel/test-https-strict.js +++ b/test/parallel/test-https-strict.js @@ -127,10 +127,10 @@ function makeReq(path, port, error, host, ca) { } var req = https.get(options); expectResponseCount++; - var server = port === server1.address().port ? server1 - : port === server2.address().port ? server2 - : port === server3.address().port ? server3 - : null; + var server = port === server1.address().port ? server1 : + port === server2.address().port ? server2 : + port === server3.address().port ? server3 : + null; if (!server) throw new Error('invalid port: ' + port); server.expectCount++; diff --git a/test/parallel/test-promises-unhandled-rejections.js b/test/parallel/test-promises-unhandled-rejections.js index 3c7a40380505de..b827dd9e368a94 100644 --- a/test/parallel/test-promises-unhandled-rejections.js +++ b/test/parallel/test-promises-unhandled-rejections.js @@ -11,9 +11,9 @@ var asyncTest = (function() { var currentTest = null; function fail(error) { - var stack = currentTest - ? error.stack + '\nFrom previous event:\n' + currentTest.stack - : error.stack; + var stack = currentTest ? + error.stack + '\nFrom previous event:\n' + currentTest.stack : + error.stack; if (currentTest) process.stderr.write('\'' + currentTest.description + '\' failed\n\n'); diff --git a/test/parallel/test-punycode.js b/test/parallel/test-punycode.js index e869e6a33bf064..7d969c66ee4e13 100644 --- a/test/parallel/test-punycode.js +++ b/test/parallel/test-punycode.js @@ -182,19 +182,19 @@ const testBattery = { ), toASCII: (test) => assert.strictEqual( punycode.toASCII(test.decoded), - regexNonASCII.test(test.decoded) - ? `xn--${test.encoded}` - : test.decoded + regexNonASCII.test(test.decoded) ? 
+ `xn--${test.encoded}` : + test.decoded ), toUnicode: (test) => assert.strictEqual( punycode.toUnicode( - regexNonASCII.test(test.decoded) - ? `xn--${test.encoded}` - : test.decoded + regexNonASCII.test(test.decoded) ? + `xn--${test.encoded}` : + test.decoded ), - regexNonASCII.test(test.decoded) - ? test.decoded.toLowerCase() - : test.decoded + regexNonASCII.test(test.decoded) ? + test.decoded.toLowerCase() : + test.decoded ) }; diff --git a/test/parallel/test-readline-interface.js b/test/parallel/test-readline-interface.js index 08dbdd488265ee..59b75cd0a86231 100644 --- a/test/parallel/test-readline-interface.js +++ b/test/parallel/test-readline-interface.js @@ -463,9 +463,9 @@ function isWarned(emitter) { }); { - const expected = terminal - ? ['\u001b[1G', '\u001b[0J', '$ ', '\u001b[3G'] - : ['$ ']; + const expected = terminal ? + ['\u001b[1G', '\u001b[0J', '$ ', '\u001b[3G'] : + ['$ ']; let counter = 0; const output = new Writable({ diff --git a/tools/eslint-rules/no-useless-regex-char-class-escape.js b/tools/eslint-rules/no-useless-regex-char-class-escape.js index 934a3fa193b506..e18077098daee2 100644 --- a/tools/eslint-rules/no-useless-regex-char-class-escape.js +++ b/tools/eslint-rules/no-useless-regex-char-class-escape.js @@ -108,9 +108,9 @@ module.exports = { }, create(context) { - const overrideSet = new Set(context.options.length - ? context.options[0].override || [] - : []); + const overrideSet = new Set(context.options.length ? + context.options[0].override || [] : + []); /** * Reports a node From 9d0220c4dec807d7d2cb9433c9a5a4379008a1a9 Mon Sep 17 00:00:00 2001 From: "Sakthipriyan Vairamani (thefourtheye)" Date: Fri, 23 Dec 2016 11:02:49 +0530 Subject: [PATCH 036/144] test: fix linter error in whatwg-url-parsing test-whatwg-url-parsing file violates the linter rule changes in https://github.com/nodejs/node/pull/10213. This patch makes the linter happy. PR-URL: https://github.com/nodejs/node/pull/10421 Reviewed-By: Anna Henningsen --- test/parallel/test-whatwg-url-parsing.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/parallel/test-whatwg-url-parsing.js b/test/parallel/test-whatwg-url-parsing.js index 99bc3ce1c63c94..ae14d1ff5060bf 100644 --- a/test/parallel/test-whatwg-url-parsing.js +++ b/test/parallel/test-whatwg-url-parsing.js @@ -136,9 +136,7 @@ for (const test of tests) { } for (const test of allTests) { - const url = test.url - ? new URL(test.url) - : new URL(test.input, test.base); + const url = test.url ? new URL(test.url) : new URL(test.input, test.base); for (const showHidden of [true, false]) { const res = url.inspect(null, { From 6796bf482924118253667c029ac7a6a78b1269b3 Mon Sep 17 00:00:00 2001 From: Evan Lucas Date: Thu, 15 Dec 2016 17:24:17 -0600 Subject: [PATCH 037/144] repl: allow autocompletion for scoped packages Previously, autocompletion of scoped packages was not supported by the repl due to not including the `@` character in the regular expression. 
PR-URL: https://github.com/nodejs/node/pull/10296 Reviewed-By: Prince John Wesley Reviewed-By: Ben Noordhuis Reviewed-By: Colin Ihrig Reviewed-By: Jeremiah Senkpiel Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- lib/repl.js | 2 +- .../node_modules/@nodejsscope/index.js | 1 + test/parallel/test-repl-tab-complete.js | 21 ++++++++++++++++--- 3 files changed, 20 insertions(+), 4 deletions(-) create mode 100644 test/fixtures/node_modules/@nodejsscope/index.js diff --git a/lib/repl.js b/lib/repl.js index d5060a37ab5ed1..858ccd883850d3 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -794,7 +794,7 @@ ArrayStream.prototype.writable = true; ArrayStream.prototype.resume = function() {}; ArrayStream.prototype.write = function() {}; -const requireRE = /\brequire\s*\(['"](([\w./-]+\/)?([\w./-]*))/; +const requireRE = /\brequire\s*\(['"](([\w@./-]+\/)?([\w@./-]*))/; const simpleExpressionRE = /(([a-zA-Z_$](?:\w|\$)*)\.)*([a-zA-Z_$](?:\w|\$)*)\.?$/; diff --git a/test/fixtures/node_modules/@nodejsscope/index.js b/test/fixtures/node_modules/@nodejsscope/index.js new file mode 100644 index 00000000000000..b42ff442aee734 --- /dev/null +++ b/test/fixtures/node_modules/@nodejsscope/index.js @@ -0,0 +1 @@ +// Not used diff --git a/test/parallel/test-repl-tab-complete.js b/test/parallel/test-repl-tab-complete.js index 77c14deaf3b023..b5f1d3856d3c71 100644 --- a/test/parallel/test-repl-tab-complete.js +++ b/test/parallel/test-repl-tab-complete.js @@ -1,8 +1,14 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var repl = require('repl'); +const common = require('../common'); +const assert = require('assert'); + +// We have to change the directory to ../fixtures before requiring repl +// in order to make the tests for completion of node_modules work properly +// since repl modifies module.paths. +process.chdir(common.fixturesDir); + +const repl = require('repl'); function getNoResultsFunction() { return common.mustCall((err, data) => { @@ -196,6 +202,15 @@ testMe.complete('require(\'n', common.mustCall(function(error, data) { }); })); +{ + const expected = ['@nodejsscope', '@nodejsscope/']; + putIn.run(['.clear']); + testMe.complete('require(\'@nodejs', common.mustCall((err, data) => { + assert.strictEqual(err, null); + assert.deepStrictEqual(data, [expected, '@nodejs']); + })); +} + // Make sure tab completion works on context properties putIn.run(['.clear']); From 9a0711d37fc70ecd5c61d3ac18d8440281163eae Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Sat, 17 Dec 2016 11:10:52 -0500 Subject: [PATCH 038/144] test: improve code in test-http-bind-twice.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * use const instead of var for required modules * use assert.strictEqual instead of assert.equal PR-URL: https://github.com/nodejs/node/pull/10318 Reviewed-By: Colin Ihrig Reviewed-By: Michaël Zasso Reviewed-By: Italo A. 
Casas Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/parallel/test-http-bind-twice.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/parallel/test-http-bind-twice.js b/test/parallel/test-http-bind-twice.js index ccbd84b149f381..f90905d3260f8a 100644 --- a/test/parallel/test-http-bind-twice.js +++ b/test/parallel/test-http-bind-twice.js @@ -1,15 +1,15 @@ 'use strict'; const common = require('../common'); -var assert = require('assert'); -var http = require('http'); +const assert = require('assert'); +const http = require('http'); -var server1 = http.createServer(common.fail); +const server1 = http.createServer(common.fail); server1.listen(0, '127.0.0.1', common.mustCall(function() { - var server2 = http.createServer(common.fail); + const server2 = http.createServer(common.fail); server2.listen(this.address().port, '127.0.0.1', common.fail); server2.on('error', common.mustCall(function(e) { - assert.equal(e.code, 'EADDRINUSE'); + assert.strictEqual(e.code, 'EADDRINUSE'); server1.close(); })); })); From ed76bfa7ba008a83703825cce1da949017c30f31 Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Fri, 16 Dec 2016 10:42:36 -0500 Subject: [PATCH 039/144] test: refactor code in test-cluster-http-pipe * use common.mustCall to control the functions execution automatically * use const instead of var * use assert.strictEqual instead assert.equal * use assert.ifError instead of throw error PR-URL: https://github.com/nodejs/node/pull/10297 Reviewed-By: Colin Ihrig Reviewed-By: Minwoo Jung Reviewed-By: Santiago Gimeno Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/parallel/test-cluster-http-pipe.js | 32 ++++++++++++------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/test/parallel/test-cluster-http-pipe.js b/test/parallel/test-cluster-http-pipe.js index 0d6cb422be80c2..6db581564a124c 100644 --- a/test/parallel/test-cluster-http-pipe.js +++ b/test/parallel/test-cluster-http-pipe.js @@ -13,28 +13,28 @@ if (common.isWindows) { if (cluster.isMaster) { common.refreshTmpDir(); - var worker = cluster.fork(); - worker.on('message', common.mustCall(function(msg) { - assert.equal(msg, 'DONE'); + const worker = cluster.fork(); + worker.on('message', common.mustCall((msg) => { + assert.strictEqual(msg, 'DONE'); })); - worker.on('exit', function() { - process.exit(); - }); + worker.on('exit', common.mustCall(() => {})); return; } -http.createServer(function(req, res) { - assert.equal(req.connection.remoteAddress, undefined); - assert.equal(req.connection.localAddress, undefined); // TODO common.PIPE? +http.createServer(common.mustCall((req, res) => { + assert.strictEqual(req.connection.remoteAddress, undefined); + assert.strictEqual(req.connection.localAddress, undefined); + // TODO common.PIPE? + res.writeHead(200); res.end('OK'); -}).listen(common.PIPE, function() { - http.get({ socketPath: common.PIPE, path: '/' }, function(res) { +})).listen(common.PIPE, common.mustCall(() => { + http.get({ socketPath: common.PIPE, path: '/' }, common.mustCall((res) => { res.resume(); - res.on('end', function(err) { - if (err) throw err; + res.on('end', common.mustCall((err) => { + assert.ifError(err); process.send('DONE'); process.exit(); - }); - }); -}); + })); + })); +})); From 6284d83092ffa55059acfcee745489e054d1356d Mon Sep 17 00:00:00 2001 From: Chase Starr Date: Fri, 16 Dec 2016 17:28:59 -0800 Subject: [PATCH 040/144] doc: clarify macosx-firewall suggestion BUILDING `./tools/macosx-firewall.sh` fails if run before build step. 
Since the suggestion comes before the build steps in the document, this change clarifies that the script should be run after building. PR-URL: https://github.com/nodejs/node/pull/10311 Reviewed-By: Myles Borins Reviewed-By: Jeremiah Senkpiel Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: Gibson Fahnestock Reviewed-By: James M Snell --- BUILDING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/BUILDING.md b/BUILDING.md index b1962d53c2caf4..b98bb75d79b656 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -24,8 +24,8 @@ On OS X, you will also need: this under the menu `Xcode -> Preferences -> Downloads` * This step will install `gcc` and the related toolchain containing `make` -* You may want to setup [firewall rules](tools/macosx-firewall.sh) to avoid -popups asking to accept incoming network connections when running tests: +* After building, you may want to setup [firewall rules](tools/macosx-firewall.sh) +to avoid popups asking to accept incoming network connections when running tests: ```console $ sudo ./tools/macosx-firewall.sh From 21fca4bddaa7abcad0b83eea64412668f3fb1ba9 Mon Sep 17 00:00:00 2001 From: Vicente Jimenez Aguilar Date: Sun, 18 Dec 2016 17:21:29 +0100 Subject: [PATCH 041/144] doc: require() tries first core not native modules Change a single word in documentation with a more precise one. Native is a module compiled in machine "native" code. A module normally written in a compiled language, not in JavaScript. Core modules form Node's built-in "core" functionalities. You don't need to install them. They are included in every Node installation and documented in https://nodejs.org/api/ . PR-URL: https://github.com/nodejs/node/pull/10324 Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: Gibson Fahnestock Reviewed-By: Sam Roberts Reviewed-By: James M Snell --- doc/api/modules.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/api/modules.md b/doc/api/modules.md index 8b8a5d65918ecf..e4bd745d7ed62a 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -368,11 +368,11 @@ example, then `require('./some-library')` would attempt to load: -If the module identifier passed to `require()` is not a native module, -and does not begin with `'/'`, `'../'`, or `'./'`, then Node.js starts at the -parent directory of the current module, and adds `/node_modules`, and -attempts to load the module from that location. Node will not append -`node_modules` to a path already ending in `node_modules`. +If the module identifier passed to `require()` is not a +[core](#modules_core_modules) module, and does not begin with `'/'`, `'../'`, or +`'./'`, then Node.js starts at the parent directory of the current module, and +adds `/node_modules`, and attempts to load the module from that location. Node +will not append `node_modules` to a path already ending in `node_modules`. If it is not found there, then it moves to the parent directory, and so on, until the root of the file system is reached. From a82be5d44c6190824b8cc72c788d088d3822aabd Mon Sep 17 00:00:00 2001 From: "Sakthipriyan Vairamani (thefourtheye)" Date: Wed, 21 Dec 2016 15:58:00 +0530 Subject: [PATCH 042/144] test: refactor test-init.js 1. Lot of repeating code has been refactored to a function 2. Errors in async calls are properly asserted 3. 
Fail the test if the callbacks are not fired PR-URL: https://github.com/nodejs/node/pull/10384 Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Julian Duque --- test/sequential/test-init.js | 58 ++++++++++++++++-------------------- 1 file changed, 26 insertions(+), 32 deletions(-) diff --git a/test/sequential/test-init.js b/test/sequential/test-init.js index c8127e32a1c527..75ac39c35ae4cc 100644 --- a/test/sequential/test-init.js +++ b/test/sequential/test-init.js @@ -2,46 +2,40 @@ const common = require('../common'); const assert = require('assert'); const child = require('child_process'); -const util = require('util'); +const path = require('path'); + if (process.env['TEST_INIT']) { - util.print('Loaded successfully!'); -} else { - // change CWD as we do this test so its not dependant on current CWD - // being in the test folder - process.chdir(__dirname); + return process.stdout.write('Loaded successfully!'); +} + +process.env.TEST_INIT = 1; - // slow but simple - var envCopy = JSON.parse(JSON.stringify(process.env)); - envCopy.TEST_INIT = 1; +function test(file, expected) { + const path = `"${process.execPath}" ${file}`; + child.exec(path, {env: process.env}, common.mustCall((err, out) => { + assert.ifError(err); + assert.strictEqual(out, expected, `'node ${file}' failed!`); + })); +} - child.exec('"' + process.execPath + '" test-init', {env: envCopy}, - function(err, stdout, stderr) { - assert.equal(stdout, 'Loaded successfully!', - '`node test-init` failed!'); - }); - child.exec('"' + process.execPath + '" test-init.js', {env: envCopy}, - function(err, stdout, stderr) { - assert.equal(stdout, 'Loaded successfully!', - '`node test-init.js` failed!'); - }); +{ + // change CWD as we do this test so it's not dependent on current CWD + // being in the test folder + process.chdir(__dirname); + test('test-init', 'Loaded successfully!'); + test('test-init.js', 'Loaded successfully!'); +} +{ // test-init-index is in fixtures dir as requested by ry, so go there process.chdir(common.fixturesDir); + test('test-init-index', 'Loaded successfully!'); +} - child.exec('"' + process.execPath + '" test-init-index', {env: envCopy}, - function(err, stdout, stderr) { - assert.equal(stdout, 'Loaded successfully!', - '`node test-init-index failed!'); - }); - +{ // ensures that `node fs` does not mistakenly load the native 'fs' module // instead of the desired file and that the fs module loads as // expected in node - process.chdir(common.fixturesDir + '/test-init-native/'); - - child.exec('"' + process.execPath + '" fs', {env: envCopy}, - function(err, stdout, stderr) { - assert.equal(stdout, 'fs loaded successfully', - '`node fs` failed!'); - }); + process.chdir(path.join(common.fixturesDir, 'test-init-native')); + test('fs', 'fs loaded successfully'); } From 91a2dc216d7a3470bb9e70a96221282b518a0d18 Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Wed, 21 Dec 2016 16:47:19 -0500 Subject: [PATCH 043/144] test: improve test-cluster-worker-constructor.js * use let and const instead of var * use assert.strictEqual instead assert.equal PR-URL: https://github.com/nodejs/node/pull/10396 Reviewed-By: Colin Ihrig Reviewed-By: Italo A. 
Casas Reviewed-By: Santiago Gimeno Reviewed-By: James M Snell --- test/parallel/test-cluster-worker-constructor.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/parallel/test-cluster-worker-constructor.js b/test/parallel/test-cluster-worker-constructor.js index f3ae7db73168a9..770d45c6a9edb7 100644 --- a/test/parallel/test-cluster-worker-constructor.js +++ b/test/parallel/test-cluster-worker-constructor.js @@ -3,9 +3,9 @@ // validates correct behavior of the cluster.Worker constructor require('../common'); -var assert = require('assert'); -var cluster = require('cluster'); -var worker; +const assert = require('assert'); +const cluster = require('cluster'); +let worker; worker = new cluster.Worker(); assert.strictEqual(worker.suicide, undefined); From 4365bb45b83d56c61fd3093473d52e69bbcb1824 Mon Sep 17 00:00:00 2001 From: Myles Borins Date: Wed, 21 Dec 2016 14:14:16 -0500 Subject: [PATCH 044/144] doc: consistent 'Returns:' part two Follow up from 8eb19c4. Lower case `return` was not updated PR-URL: https://github.com/nodejs/node/pull/10391 Ref: https://github.com/nodejs/node/pull/9554 Reviewed-By: Colin Ihrig Reviewed-By: Julian Duque Reviewed-By: Jeremiah Senkpiel Reviewed-By: Anna Henningsen Reviewed-By: Luigi Pinca Reviewed-By: Roman Reiss Reviewed-By: James M Snell --- doc/api/child_process.md | 8 ++++---- doc/api/domain.md | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/doc/api/child_process.md b/doc/api/child_process.md index 738b916eb6ac4b..110de3bd869e05 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -318,7 +318,7 @@ added: v0.1.90 `'/bin/sh'` on UNIX, and `'cmd.exe'` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX, or `/d /s /c` on Windows. Defaults to `false` (no shell). -* return: {ChildProcess} +* Returns: {ChildProcess} The `child_process.spawn()` method spawns a new process using the given `command`, with command line arguments in `args`. If omitted, `args` defaults @@ -588,7 +588,7 @@ added: v0.11.12 * [`maxBuffer`][] {Number} largest amount of data (in bytes) allowed on stdout or stderr - if exceeded child process is killed * `encoding` {String} The encoding used for all stdio inputs and outputs. (Default: `'buffer'`) -* return: {Buffer|String} The stdout from the command +* Returns: {Buffer|String} The stdout from the command The `child_process.execFileSync()` method is generally identical to [`child_process.execFile()`][] with the exception that the method will not return @@ -631,7 +631,7 @@ added: v0.11.12 stdout or stderr - if exceeded child process is killed * `encoding` {String} The encoding used for all stdio inputs and outputs. (Default: `'buffer'`) -* return: {Buffer|String} The stdout from the command +* Returns: {Buffer|String} The stdout from the command The `child_process.execSync()` method is generally identical to [`child_process.exec()`][] with the exception that the method will not return until @@ -673,7 +673,7 @@ added: v0.11.12 `'/bin/sh'` on UNIX, and `'cmd.exe'` on Windows. A different shell can be specified as a string. The shell should understand the `-c` switch on UNIX, or `/d /s /c` on Windows. Defaults to `false` (no shell). 
-* return: {Object} +* Returns: {Object} * `pid` {Number} Pid of the child process * `output` {Array} Array of results from stdio output * `stdout` {Buffer|String} The contents of `output[1]` diff --git a/doc/api/domain.md b/doc/api/domain.md index e1dd65e05c45d3..86f5226d69e370 100644 --- a/doc/api/domain.md +++ b/doc/api/domain.md @@ -257,7 +257,7 @@ serverDomain.run(() => { ## domain.create() -* return: {Domain} +* Returns: {Domain} Returns a new Domain object. @@ -338,7 +338,7 @@ specified emitter. ### domain.bind(callback) * `callback` {Function} The callback function -* return: {Function} The bound function +* Returns: {Function} The bound function The returned function will be a wrapper around the supplied callback function. When the returned function is called, any errors that are @@ -366,7 +366,7 @@ d.on('error', (er) => { ### domain.intercept(callback) * `callback` {Function} The callback function -* return: {Function} The intercepted function +* Returns: {Function} The intercepted function This method is almost identical to [`domain.bind(callback)`][]. However, in addition to catching thrown errors, it will also intercept [`Error`][] From 6d15e7b5286be2087716139744b99c2731327f10 Mon Sep 17 00:00:00 2001 From: Troy Connor Date: Wed, 7 Dec 2016 11:06:30 -0500 Subject: [PATCH 045/144] benchmark: refactor buffer benchmarks Add configuration object createBenchmark object for buffer size & iteration in buffer-base64-encode & buffer-base64-decode.js. PR-URL: https://github.com/nodejs/node/pull/10175 Reviewed-By: James M Snell Reviewed-By: Brian White --- benchmark/buffers/buffer-base64-decode.js | 9 ++++++--- benchmark/buffers/buffer-base64-encode.js | 20 ++++++++++++-------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/benchmark/buffers/buffer-base64-decode.js b/benchmark/buffers/buffer-base64-decode.js index 01f7f1bc91bc4a..3d00e69b90764c 100644 --- a/benchmark/buffers/buffer-base64-decode.js +++ b/benchmark/buffers/buffer-base64-decode.js @@ -2,15 +2,18 @@ const assert = require('assert'); const common = require('../common.js'); -const bench = common.createBenchmark(main, {}); +const bench = common.createBenchmark(main, { + n: [32], +}); function main(conf) { + const n = +conf.n; const s = 'abcd'.repeat(8 << 20); s.match(/./); // Flatten string. 
assert.strictEqual(s.length % 4, 0); const b = Buffer.allocUnsafe(s.length / 4 * 3); b.write(s, 0, s.length, 'base64'); bench.start(); - for (var i = 0; i < 32; i += 1) b.base64Write(s, 0, s.length); - bench.end(32); + for (var i = 0; i < n; i += 1) b.base64Write(s, 0, s.length); + bench.end(n); } diff --git a/benchmark/buffers/buffer-base64-encode.js b/benchmark/buffers/buffer-base64-encode.js index 930dc82b6b9ab7..f618ba21ecfdc9 100644 --- a/benchmark/buffers/buffer-base64-encode.js +++ b/benchmark/buffers/buffer-base64-encode.js @@ -1,16 +1,20 @@ 'use strict'; var common = require('../common.js'); -var bench = common.createBenchmark(main, {}); +const bench = common.createBenchmark(main, { + len: [64 * 1024 * 1024], + n: [32] +}); function main(conf) { - var N = 64 * 1024 * 1024; - var b = Buffer.allocUnsafe(N); - var s = ''; - var i; + const n = +conf.n; + const len = +conf.len; + const b = Buffer.allocUnsafe(len); + let s = ''; + let i; for (i = 0; i < 256; ++i) s += String.fromCharCode(i); - for (i = 0; i < N; i += 256) b.write(s, i, 256, 'ascii'); + for (i = 0; i < len; i += 256) b.write(s, i, 256, 'ascii'); bench.start(); - for (i = 0; i < 32; ++i) b.toString('base64'); - bench.end(64); + for (i = 0; i < n; ++i) b.toString('base64'); + bench.end(n); } From 826decf8e5ea3b54079006878ef92018fc26973f Mon Sep 17 00:00:00 2001 From: larissayvette Date: Thu, 22 Dec 2016 00:29:38 +0100 Subject: [PATCH 046/144] test: basic functionality of readUIntBE() PR-URL: https://github.com/nodejs/node/pull/10417 Reviewed-By: James M Snell Reviewed-By: Rich Trott Reviewed-By: Italo A. Casas Reviewed-By: Julian Duque --- test/parallel/test-buffer-readuintbe.js | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 test/parallel/test-buffer-readuintbe.js diff --git a/test/parallel/test-buffer-readuintbe.js b/test/parallel/test-buffer-readuintbe.js new file mode 100644 index 00000000000000..162e9fea27a14f --- /dev/null +++ b/test/parallel/test-buffer-readuintbe.js @@ -0,0 +1,24 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +// testing basic functionality of readUIntBE() + +const buf = Buffer.from([42, 84, 168, 127]); +const result = buf.readUIntBE(2); + +assert.strictEqual(result, 84); + +assert.throws( + () => { + buf.readUIntBE(5); + }, + /Index out of range/ +); + +assert.doesNotThrow( + () => { + buf.readUIntBE(5, 0, true); + }, + 'readUIntBE() should not throw if noAssert is true' +); From 8a9d68ad7ca061fdf40fb2ce6861a7cf086bf9e5 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 21 Dec 2016 10:15:42 -0800 Subject: [PATCH 047/144] doc: improve common.mustCall() explanation PR-URL: https://github.com/nodejs/node/pull/10390 Reviewed-By: Colin Ihrig Reviewed-By: Julian Duque Reviewed-By: Luigi Pinca Reviewed-By: Italo A. Casas Reviewed-By: James M Snell --- test/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/README.md b/test/README.md index 1c3303435db517..8d8cec44919720 100644 --- a/test/README.md +++ b/test/README.md @@ -290,8 +290,11 @@ Array of IPV6 hosts. ### mustCall(fn[, expected]) * fn [<Function>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function) * expected [<Number>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) default = 1 +* return [<Function>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function) -Number of times `fn` should be called. +Returns a function that calls `fn`. 
If the returned function has not been called +exactly `expected` number of times when the test is complete, then the test will +fail. ### nodeProcessAborted(exitCode, signal) * `exitCode` [<Number>](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) From e9b2325d68e3c24d90515d1567e34642edc0c64f Mon Sep 17 00:00:00 2001 From: Wallace Zhang Date: Thu, 1 Dec 2016 12:07:23 -0600 Subject: [PATCH 048/144] test: test error messages in test-dns-regress-7070 Add a RegExp as a second argument to assert.throws(). PR-URL: https://github.com/nodejs/node/pull/10058 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Anna Henningsen --- test/parallel/test-dns-regress-7070.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/parallel/test-dns-regress-7070.js b/test/parallel/test-dns-regress-7070.js index 25cf60cf6a19f2..eb939b47559a9d 100644 --- a/test/parallel/test-dns-regress-7070.js +++ b/test/parallel/test-dns-regress-7070.js @@ -4,5 +4,7 @@ const assert = require('assert'); const dns = require('dns'); // Should not raise assertion error. Issue #7070 -assert.throws(() => dns.resolveNs([])); // bad name -assert.throws(() => dns.resolveNs('')); // bad callback +assert.throws(() => dns.resolveNs([]), // bad name + /^Error: "name" argument must be a string$/); +assert.throws(() => dns.resolveNs(''), // bad callback + /^Error: "callback" argument must be a function$/); From d3bef30b5f05143d269c05bd1eacc400f29f0e59 Mon Sep 17 00:00:00 2001 From: Segu Riluvan Date: Thu, 22 Dec 2016 21:27:56 -0600 Subject: [PATCH 049/144] test: refactor test-child-process-stdin Use assert.strictEqual instead of assert.equal and assert.ok PR-URL: https://github.com/nodejs/node/pull/10420 Reviewed-By: Anna Henningsen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/parallel/test-child-process-stdin.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-child-process-stdin.js b/test/parallel/test-child-process-stdin.js index 95be2d4c8a12b2..eccced0c32d7be 100644 --- a/test/parallel/test-child-process-stdin.js +++ b/test/parallel/test-child-process-stdin.js @@ -9,8 +9,8 @@ cat.stdin.write('hello'); cat.stdin.write(' '); cat.stdin.write('world'); -assert.ok(cat.stdin.writable); -assert.ok(!cat.stdin.readable); +assert.strictEqual(true, cat.stdin.writable); +assert.strictEqual(false, cat.stdin.readable); cat.stdin.end(); @@ -34,8 +34,8 @@ cat.on('exit', common.mustCall(function(status) { cat.on('close', common.mustCall(function() { if (common.isWindows) { - assert.equal('hello world\r\n', response); + assert.strictEqual('hello world\r\n', response); } else { - assert.equal('hello world', response); + assert.strictEqual('hello world', response); } })); From 44f4d6001fc6ae3776c455da87556480406e2064 Mon Sep 17 00:00:00 2001 From: Rico Cai Date: Thu, 1 Dec 2016 10:20:48 -0600 Subject: [PATCH 050/144] test: improve test-cluster-net-listen.js convert var to const PR-URL: https://github.com/nodejs/node/pull/9953 Reviewed-By: Rich Trott Reviewed-By: James M Snell Reviewed-By: Gibson Fahnestock --- test/parallel/test-cluster-net-listen.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-cluster-net-listen.js b/test/parallel/test-cluster-net-listen.js index 42c13f388f6d30..d065c466f26621 100644 --- a/test/parallel/test-cluster-net-listen.js +++ b/test/parallel/test-cluster-net-listen.js @@ -1,8 +1,8 @@ 'use strict'; -var common = 
require('../common'); -var assert = require('assert'); -var cluster = require('cluster'); -var net = require('net'); +const common = require('../common'); +const assert = require('assert'); +const cluster = require('cluster'); +const net = require('net'); if (cluster.isMaster) { // ensure that the worker exits peacefully From e6702d6d9b58f1358e0f5cd0ce03df93bbbd558b Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 21 Dec 2016 17:53:11 -0800 Subject: [PATCH 051/144] test: fix flaky test-https-timeout Remove `setTimeout()` in test and instead rely on `common.mustCall()` on a `timeout` event handler. The test was flaky on CI. The flakiness was replicable by running the test under load. This version, in contrast, is robust under load. Took the opportunity to do some `var` -> `const` while refactoring. PR-URL: https://github.com/nodejs/node/pull/10404 Reviewed-By: Santiago Gimeno Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: James M Snell --- test/parallel/test-https-timeout.js | 34 ++++++++++------------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/test/parallel/test-https-timeout.js b/test/parallel/test-https-timeout.js index 865c15165e2460..ad8decc03cf474 100644 --- a/test/parallel/test-https-timeout.js +++ b/test/parallel/test-https-timeout.js @@ -1,24 +1,24 @@ 'use strict'; -var common = require('../common'); +const common = require('../common'); if (!common.hasCrypto) { common.skip('missing crypto'); return; } -var https = require('https'); +const https = require('https'); -var fs = require('fs'); +const fs = require('fs'); -var options = { +const options = { key: fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem'), cert: fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem') }; // a server that never replies -var server = https.createServer(options, function() { +const server = https.createServer(options, function() { console.log('Got request. Doing nothing.'); -}).listen(0, function() { - var req = https.request({ +}).listen(0, common.mustCall(function() { + const req = https.request({ host: 'localhost', port: this.address().port, path: '/', @@ -28,26 +28,14 @@ var server = https.createServer(options, function() { req.setTimeout(10); req.end(); - req.on('response', function(res) { + req.on('response', function() { console.log('got response'); }); - req.on('socket', function() { - console.log('got a socket'); - - req.socket.on('connect', function() { - console.log('socket connected'); - }); - - setTimeout(function() { - throw new Error('Did not get timeout event'); - }, 200); - }); - - req.on('timeout', function() { + req.on('timeout', common.mustCall(function() { console.log('timeout occurred outside'); req.destroy(); server.close(); process.exit(0); - }); -}); + })); +})); From 5d64f3d76fc38a232919ed606b6a3d1c772cee4a Mon Sep 17 00:00:00 2001 From: Kevin Cox Date: Thu, 1 Dec 2016 11:01:49 -0600 Subject: [PATCH 052/144] test: update test-tls-check-server-identity.js Changed var to const, assert.equal to assert.strictEqual, and used a template string for error output. 
PR-URL: https://github.com/nodejs/node/pull/9986 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- .../test-tls-check-server-identity.js | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/test/parallel/test-tls-check-server-identity.js b/test/parallel/test-tls-check-server-identity.js index 0732ab3c0fcd67..5c89ef8bf4d425 100644 --- a/test/parallel/test-tls-check-server-identity.js +++ b/test/parallel/test-tls-check-server-identity.js @@ -1,16 +1,17 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var util = require('util'); +const common = require('../common'); if (!common.hasCrypto) { common.skip('missing crypto'); return; } -var tls = require('tls'); +const assert = require('assert'); +const util = require('util'); -var tests = [ +const tls = require('tls'); + +const tests = [ // False-y values. { host: false, @@ -253,9 +254,9 @@ var tests = [ ]; tests.forEach(function(test, i) { - var err = tls.checkServerIdentity(test.host, test.cert); - assert.equal(err && err.reason, - test.error, - 'Test#' + i + ' failed: ' + util.inspect(test) + '\n' + - test.error + ' != ' + (err && err.reason)); + const err = tls.checkServerIdentity(test.host, test.cert); + assert.strictEqual(err && err.reason, + test.error, + `Test# ${i} failed: ${util.inspect(test)} \n` + + `${test.error} != ${(err && err.reason)}`); }); From bbe618d3e251e29a896635c6b96a75f622ac2395 Mon Sep 17 00:00:00 2001 From: Amar Zavery Date: Thu, 1 Dec 2016 11:55:13 -0600 Subject: [PATCH 053/144] test: refactor test-cluster-send-handle-twice.js - `var` --> `const` as applicable - `assert.equal` --> `assert.strictEqual` - `assert(false, ..)` --> `common.fail()` - `common.mustCall` for functions that need to be called exactly once - modified an `assert(!signal, 'Worker exited by a signal');` call to `assert.strictEqual(signal, null);` call as that made more sense PR-URL: https://github.com/nodejs/node/pull/10049 Reviewed-By: Rich Trott Reviewed-By: Colin Ihrig --- .../test-cluster-send-handle-twice.js | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/test/parallel/test-cluster-send-handle-twice.js b/test/parallel/test-cluster-send-handle-twice.js index 172a5563f306f5..f4d1bd8e0cc137 100644 --- a/test/parallel/test-cluster-send-handle-twice.js +++ b/test/parallel/test-cluster-send-handle-twice.js @@ -1,36 +1,36 @@ 'use strict'; // Testing to send an handle twice to the parent process. 
-var common = require('../common'); -var assert = require('assert'); -var cluster = require('cluster'); -var net = require('net'); +const common = require('../common'); +const assert = require('assert'); +const cluster = require('cluster'); +const net = require('net'); -var workers = { +const workers = { toStart: 1 }; if (cluster.isMaster) { - for (var i = 0; i < workers.toStart; ++i) { - var worker = cluster.fork(); - worker.on('exit', function(code, signal) { + for (let i = 0; i < workers.toStart; ++i) { + const worker = cluster.fork(); + worker.on('exit', common.mustCall(function(code, signal) { assert.strictEqual(code, 0, 'Worker exited with an error code'); - assert(!signal, 'Worker exited by a signal'); - }); + assert.strictEqual(signal, null, 'Worker exited by a signal'); + })); } } else { - var server = net.createServer(function(socket) { + const server = net.createServer(function(socket) { process.send('send-handle-1', socket); process.send('send-handle-2', socket); }); server.listen(common.PORT, function() { - var client = net.connect({ host: 'localhost', port: common.PORT }); - client.on('close', function() { cluster.worker.disconnect(); }); + const client = net.connect({ host: 'localhost', port: common.PORT }); + client.on('close', common.mustCall(() => { cluster.worker.disconnect(); })); setTimeout(function() { client.end(); }, 50); }).on('error', function(e) { console.error(e); - assert(false, 'server.listen failed'); + common.fail('server.listen failed'); cluster.worker.disconnect(); }); } From db3ac5d6e7c6472d6a3627dce7e47e914a099ea2 Mon Sep 17 00:00:00 2001 From: Harish Tejwani Date: Thu, 1 Dec 2016 10:52:33 -0600 Subject: [PATCH 054/144] test: refactor test-tls-client-getephemeralkeyinfo change var to const and add mustCall PR-URL: https://github.com/nodejs/node/pull/9954 Reviewed-By: Prince John Wesley Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- .../test-tls-client-getephemeralkeyinfo.js | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/parallel/test-tls-client-getephemeralkeyinfo.js b/test/parallel/test-tls-client-getephemeralkeyinfo.js index 38fd602b1c2262..c1e6597245a612 100644 --- a/test/parallel/test-tls-client-getephemeralkeyinfo.js +++ b/test/parallel/test-tls-client-getephemeralkeyinfo.js @@ -1,16 +1,16 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); +const common = require('../common'); +const assert = require('assert'); if (!common.hasCrypto) { common.skip('missing crypto'); process.exit(); } -var tls = require('tls'); +const tls = require('tls'); -var fs = require('fs'); -var key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); -var cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); +const fs = require('fs'); +const key = fs.readFileSync(common.fixturesDir + '/keys/agent2-key.pem'); +const cert = fs.readFileSync(common.fixturesDir + '/keys/agent2-cert.pem'); var ntests = 0; var nsuccess = 0; @@ -45,12 +45,12 @@ function test(size, type, name, next) { conn.end(); }); - server.on('close', function(err) { + server.on('close', common.mustCall(function(err) { assert(!err); if (next) next(); - }); + })); - server.listen(0, '127.0.0.1', function() { + server.listen(0, '127.0.0.1', common.mustCall(function() { var client = tls.connect({ port: this.address().port, rejectUnauthorized: false @@ -62,7 +62,7 @@ function test(size, type, name, next) { nsuccess++; server.close(); }); - }); + })); } function testNOT_PFS() { From 
1b9f548e7d0ef06973cfb257a855cdd696a6a499 Mon Sep 17 00:00:00 2001 From: Brian Chirgwin Date: Thu, 1 Dec 2016 11:10:28 -0600 Subject: [PATCH 055/144] test: refactor test-tls-interleave var -> let / const added common.mustCall() to callback assert.equal() -> assert.strictEqual() PR-URL: https://github.com/nodejs/node/pull/10017 Reviewed-By: Colin Ihrig --- test/parallel/test-tls-interleave.js | 34 +++++++++++++++------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/test/parallel/test-tls-interleave.js b/test/parallel/test-tls-interleave.js index d03ed249d53253..9cccee82506005 100644 --- a/test/parallel/test-tls-interleave.js +++ b/test/parallel/test-tls-interleave.js @@ -1,37 +1,38 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); +const common = require('../common'); if (!common.hasCrypto) { common.skip('missing crypto'); return; } -var tls = require('tls'); +const assert = require('assert'); -var fs = require('fs'); +const tls = require('tls'); -var dir = common.fixturesDir; -var options = { key: fs.readFileSync(dir + '/test_key.pem'), - cert: fs.readFileSync(dir + '/test_cert.pem'), - ca: [ fs.readFileSync(dir + '/test_ca.pem') ] }; +const fs = require('fs'); -var writes = [ +const dir = common.fixturesDir; +const options = { key: fs.readFileSync(dir + '/test_key.pem'), + cert: fs.readFileSync(dir + '/test_cert.pem'), + ca: [ fs.readFileSync(dir + '/test_ca.pem') ] }; + +const writes = [ 'some server data', 'and a separate packet', 'and one more', ]; -var receivedWrites = 0; +let receivedWrites = 0; -var server = tls.createServer(options, function(c) { +const server = tls.createServer(options, function(c) { writes.forEach(function(str) { c.write(str); }); -}).listen(0, function() { +}).listen(0, common.mustCall(function() { const connectOpts = { rejectUnauthorized: false }; - var c = tls.connect(this.address().port, connectOpts, function() { + const c = tls.connect(this.address().port, connectOpts, function() { c.write('some client data'); c.on('readable', function() { - var data = c.read(); + let data = c.read(); if (data === null) return; @@ -47,8 +48,9 @@ var server = tls.createServer(options, function(c) { } }); }); -}); +})); + process.on('exit', function() { - assert.equal(receivedWrites, writes.length); + assert.strictEqual(receivedWrites, writes.length); }); From 4bfd9c0a35a8891f5a5a533ba1dc9c155de4f683 Mon Sep 17 00:00:00 2001 From: Josh Mays Date: Thu, 1 Dec 2016 11:57:14 -0600 Subject: [PATCH 056/144] test: refactor test-pipe-file-to-http Changing var defs to const/let, changing assert.equal to assert.strictEqual. 
Wrapping functions called once with common.mustCall PR-URL: https://github.com/nodejs/node/pull/10054 Reviewed-By: Colin Ihrig --- test/parallel/test-pipe-file-to-http.js | 41 ++++++++++++------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/test/parallel/test-pipe-file-to-http.js b/test/parallel/test-pipe-file-to-http.js index 8d804f70b7b0e1..f72cfe7d793348 100644 --- a/test/parallel/test-pipe-file-to-http.js +++ b/test/parallel/test-pipe-file-to-http.js @@ -1,20 +1,19 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var fs = require('fs'); -var http = require('http'); -var path = require('path'); -var cp = require('child_process'); +const common = require('../common'); +const assert = require('assert'); +const fs = require('fs'); +const http = require('http'); +const path = require('path'); +const cp = require('child_process'); common.refreshTmpDir(); -var filename = path.join(common.tmpDir || '/tmp', 'big'); -var clientReqComplete = false; -var count = 0; +const filename = path.join(common.tmpDir || '/tmp', 'big'); +let count = 0; -var server = http.createServer(function(req, res) { - var timeoutId; - assert.equal('POST', req.method); +const server = http.createServer(function(req, res) { + let timeoutId; + assert.strictEqual('POST', req.method); req.pause(); setTimeout(function() { @@ -36,27 +35,26 @@ var server = http.createServer(function(req, res) { server.listen(0); server.on('listening', function() { - var cmd = common.ddCommand(filename, 10240); + const cmd = common.ddCommand(filename, 10240); - cp.exec(cmd, function(err, stdout, stderr) { + cp.exec(cmd, function(err) { if (err) throw err; makeRequest(); }); }); function makeRequest() { - var req = http.request({ + const req = http.request({ port: server.address().port, path: '/', method: 'POST' }); - var s = fs.ReadStream(filename); + const s = fs.ReadStream(filename); s.pipe(req); - s.on('close', function(err) { - if (err) throw err; - clientReqComplete = true; - }); + s.on('close', common.mustCall((err) => { + assert.ifError(err); + })); req.on('response', function(res) { res.resume(); @@ -67,6 +65,5 @@ function makeRequest() { } process.on('exit', function() { - assert.equal(1024 * 10240, count); - assert.ok(clientReqComplete); + assert.strictEqual(1024 * 10240, count); }); From 2ad9faa19eb424f07ed6341c18048a3627dfdafa Mon Sep 17 00:00:00 2001 From: Ken Russo Date: Thu, 1 Dec 2016 09:57:52 -0700 Subject: [PATCH 057/144] test: add second argument to assert.throws() The assert.throws() calls in test-event-emitter-max-listeners.js should include a constructor or RegExp as a second argument. 
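A minimal sketch of why the second argument matters (illustrative code, not
part of this patch; the thrown error only mirrors the message EventEmitter
uses):

```js
'use strict';
const assert = require('assert');

// Hypothetical stand-in for the EventEmitter argument check.
function setMaxListeners(n) {
  if (typeof n !== 'number' || n < 0 || isNaN(n))
    throw new TypeError('"n" argument must be a positive number');
}

// Passes as long as *any* error is thrown -- a different error type or a
// typo in the message would go unnoticed.
assert.throws(() => setMaxListeners(NaN));

// Also validates the error, via a RegExp matched against its string form...
assert.throws(() => setMaxListeners(NaN),
              /^TypeError: "n" argument must be a positive number$/);

// ...or via the expected constructor.
assert.throws(() => setMaxListeners(-1), TypeError);
```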
PR-URL: https://github.com/nodejs/node/pull/9987 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/parallel/test-event-emitter-max-listeners.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/parallel/test-event-emitter-max-listeners.js b/test/parallel/test-event-emitter-max-listeners.js index 5dabbac6ae28c0..0ace154aa00a5c 100644 --- a/test/parallel/test-event-emitter-max-listeners.js +++ b/test/parallel/test-event-emitter-max-listeners.js @@ -1,8 +1,8 @@ 'use strict'; const common = require('../common'); -var assert = require('assert'); -var events = require('events'); -var e = new events.EventEmitter(); +const assert = require('assert'); +const events = require('events'); +const e = new events.EventEmitter(); e.on('maxListeners', common.mustCall(function() {})); @@ -11,14 +11,14 @@ e.setMaxListeners(42); assert.throws(function() { e.setMaxListeners(NaN); -}); +}, /^TypeError: "n" argument must be a positive number$/); assert.throws(function() { e.setMaxListeners(-1); -}); +}, /^TypeError: "n" argument must be a positive number$/); assert.throws(function() { e.setMaxListeners('and even this'); -}); +}, /^TypeError: "n" argument must be a positive number$/); e.emit('maxListeners'); From 9349f086d9ab9f0736fb4d6482b511869c7e39a1 Mon Sep 17 00:00:00 2001 From: Christy Leung Date: Thu, 1 Dec 2016 11:03:32 -0600 Subject: [PATCH 058/144] test: refactor test-internal-modules * var -> const * add RegExp to assert.throws() to check error message PR-URL: https://github.com/nodejs/node/pull/10016 Reviewed-By: James M Snell --- test/parallel/test-internal-modules.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-internal-modules.js b/test/parallel/test-internal-modules.js index ea300d5c5fb85c..2f11ca18dd6b34 100644 --- a/test/parallel/test-internal-modules.js +++ b/test/parallel/test-internal-modules.js @@ -1,11 +1,11 @@ 'use strict'; -var common = require('../common'); -var path = require('path'); -var assert = require('assert'); +const common = require('../common'); +const path = require('path'); +const assert = require('assert'); assert.throws(function() { require('internal/freelist'); -}); +}, /^Error: Cannot find module 'internal\/freelist'$/); assert.strictEqual( require(path.join(common.fixturesDir, 'internal-modules')), From d33e560929ef29e90abb8f0a3fdffdd749236681 Mon Sep 17 00:00:00 2001 From: "Sakthipriyan Vairamani (thefourtheye)" Date: Wed, 21 Dec 2016 07:20:45 +0530 Subject: [PATCH 059/144] test: fix and improve debug-break-on-uncaught This test runs based on a expectation that the stderr will get the string 'Debugger listening on port'. But the actual message printed to stderr has changed to 'Debugger listening on host:port'. So the the actuals tests did not even start and eventually timeout. Apart from that, changed `var`s to `let`s or `const`s. 
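A minimal sketch of the mismatch being fixed (the stderr line below is
illustrative, not taken from this patch):

```js
'use strict';
// Illustrative stderr line; the real output now includes host and port.
const stderrLine = 'Debugger listening on 127.0.0.1:5858';

// Old expectation: only matches the pre-change wording "... on port <n>".
console.log(/Debugger listening on port/.test(stderrLine)); // false

// New, relaxed expectation: matches the current wording as well.
console.log(/Debugger listening on /.test(stderrLine)); // true
```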
Refs: https://github.com/nodejs/node/issues/10361 PR-URL: https://github.com/nodejs/node/pull/10370 Reviewed-By: Colin Ihrig Reviewed-By: Rich Trott Reviewed-By: James M Snell Date: Tue, 20 Dec 2016 14:57:28 -0500 Subject: [PATCH 060/144] benchmark: use commas in non-csv rate output PR-URL: https://github.com/nodejs/node/pull/10360 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- benchmark/common.js | 4 +++- benchmark/run.js | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/benchmark/common.js b/benchmark/common.js index 94f11a2be96933..1ab734222a0498 100644 --- a/benchmark/common.js +++ b/benchmark/common.js @@ -185,7 +185,9 @@ function formatResult(data) { conf += ' ' + key + '=' + JSON.stringify(data.conf[key]); } - return `${data.name}${conf}: ${data.rate}`; + const rate = Math.floor(data.rate) + .toString().replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1,'); + return `${data.name}${conf}: ${rate}`; } function sendResult(data) { diff --git a/benchmark/run.js b/benchmark/run.js index 16e620f9a0db7b..52ce74024e1ae4 100644 --- a/benchmark/run.js +++ b/benchmark/run.js @@ -56,7 +56,9 @@ if (format === 'csv') { conf = conf.replace(/"/g, '""'); console.log(`"${data.name}", "${conf}", ${data.rate}, ${data.time}`); } else { - console.log(`${data.name} ${conf}: ${data.rate}`); + const rate = Math.floor(data.rate) + .toString().replace(/(\d)(?=(\d\d\d)+(?!\d))/g, '$1,'); + console.log(`${data.name} ${conf}: ${rate}`); } }); From 6629f8f83f01f8c6e3ee8116722f65a416a14e1c Mon Sep 17 00:00:00 2001 From: Jeremy Yallop Date: Mon, 12 Dec 2016 15:56:36 +0000 Subject: [PATCH 061/144] fs: cache non-symlinks in realpathSync. Extend `fs.realpathSync` to cache the results for paths that are not symlinks in addition to caching symlink mappings. PR-URL: https://github.com/nodejs/node/pull/10253 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- lib/fs.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/fs.js b/lib/fs.js index 32d78b7b187b1f..d407cf1479eac0 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -1532,6 +1532,7 @@ fs.realpathSync = function realpathSync(p, options) { var stat = fs.lstatSync(base); if (!stat.isSymbolicLink()) { knownHard[base] = true; + if (cache) cache.set(base, base); continue; } From 599a2a956b0517314267e1dc61d1a909d6278555 Mon Sep 17 00:00:00 2001 From: malen Date: Thu, 1 Dec 2016 08:54:06 -0800 Subject: [PATCH 062/144] test: refactor test-child-process-ipc Change var to const or let. Change assert.equal() to assert.strictEqual(). 
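For context on why these refactors prefer assert.strictEqual() (a generic
illustration, not code from this patch): assert.equal() uses loose (==)
comparison and can hide type mismatches.

```js
'use strict';
const assert = require('assert');

// Loose comparison: '2' == 2, so this does not throw.
assert.equal('2', 2);

// Strict comparison: '2' !== 2, so this throws an AssertionError.
assert.throws(() => assert.strictEqual('2', 2), assert.AssertionError);
```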
PR-URL: https://github.com/nodejs/node/pull/9990 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Roman Reiss --- test/parallel/test-child-process-ipc.js | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/test/parallel/test-child-process-ipc.js b/test/parallel/test-child-process-ipc.js index 7d0447569ffe73..cbaa270f5e8440 100644 --- a/test/parallel/test-child-process-ipc.js +++ b/test/parallel/test-child-process-ipc.js @@ -1,17 +1,18 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var spawn = require('child_process').spawn; +const common = require('../common'); +const assert = require('assert'); -var path = require('path'); +const spawn = require('child_process').spawn; -var sub = path.join(common.fixturesDir, 'echo.js'); +const path = require('path'); -var gotHelloWorld = false; -var gotEcho = false; +const sub = path.join(common.fixturesDir, 'echo.js'); -var child = spawn(process.argv[0], [sub]); +let gotHelloWorld = false; +let gotEcho = false; + +const child = spawn(process.argv[0], [sub]); child.stderr.on('data', function(data) { console.log('parent stderr: ' + data); @@ -23,7 +24,7 @@ child.stdout.on('data', function(data) { console.log('child said: ' + JSON.stringify(data)); if (!gotHelloWorld) { console.error('testing for hello world'); - assert.equal('hello world\r\n', data); + assert.strictEqual('hello world\r\n', data); gotHelloWorld = true; console.error('writing echo me'); child.stdin.write('echo me\r\n'); From a5b8d097c5100a3a0740b22d3163e0cb8a92920f Mon Sep 17 00:00:00 2001 From: Kailean Courtney Date: Thu, 1 Dec 2016 10:33:36 -0600 Subject: [PATCH 063/144] test: clean up repl-reset-event file * Change vars to let/const * Add mustCall * equal -> strictEqual * remove timeout PR-URL: https://github.com/nodejs/node/pull/9931 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- test/parallel/test-repl-reset-event.js | 47 ++++++++++++-------------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/test/parallel/test-repl-reset-event.js b/test/parallel/test-repl-reset-event.js index eee61ac17356f4..190aa2227de9a3 100644 --- a/test/parallel/test-repl-reset-event.js +++ b/test/parallel/test-repl-reset-event.js @@ -1,52 +1,49 @@ 'use strict'; -var common = require('../common'); +const common = require('../common'); common.globalCheck = false; -var assert = require('assert'); -var repl = require('repl'); +const assert = require('assert'); +const repl = require('repl'); // Create a dummy stream that does nothing const dummy = new common.ArrayStream(); function testReset(cb) { - var r = repl.start({ + const r = repl.start({ input: dummy, output: dummy, useGlobal: false }); r.context.foo = 42; - r.on('reset', function(context) { + r.on('reset', common.mustCall(function(context) { assert(!!context, 'REPL did not emit a context with reset event'); - assert.equal(context, r.context, 'REPL emitted incorrect context'); - assert.equal(context.foo, undefined, 'REPL emitted the previous context' + - ', and is not using global as context'); + assert.strictEqual(context, r.context, 'REPL emitted incorrect context'); + assert.strictEqual( + context.foo, + undefined, + 'REPL emitted the previous context, and is not using global as context' + ); context.foo = 42; cb(); - }); + })); r.resetContext(); } -function testResetGlobal(cb) { - var r = repl.start({ +function testResetGlobal() { + const r = repl.start({ input: dummy, output: dummy, useGlobal: true }); r.context.foo = 42; - r.on('reset', 
function(context) { - assert.equal(context.foo, 42, - '"foo" property is missing from REPL using global as context'); - cb(); - }); + r.on('reset', common.mustCall(function(context) { + assert.strictEqual( + context.foo, + 42, + '"foo" property is missing from REPL using global as context' + ); + })); r.resetContext(); } -var timeout = setTimeout(function() { - common.fail('Timeout, REPL did not emit reset events'); -}, 5000); - -testReset(function() { - testResetGlobal(function() { - clearTimeout(timeout); - }); -}); +testReset(common.mustCall(testResetGlobal)); From d4c888df88d6fdc3f51c6a370eedbc31bc303fc5 Mon Sep 17 00:00:00 2001 From: larissayvette Date: Tue, 20 Dec 2016 12:49:51 +0100 Subject: [PATCH 064/144] test: basic functionality of readUIntLE() PR-URL: https://github.com/nodejs/node/pull/10359 Reviewed-By: Julian Duque Reviewed-By: Rich Trott Reviewed-By: Italo A. Casas Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- test/parallel/test-buffer-readuintle.js | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 test/parallel/test-buffer-readuintle.js diff --git a/test/parallel/test-buffer-readuintle.js b/test/parallel/test-buffer-readuintle.js new file mode 100644 index 00000000000000..982adb8607eaa1 --- /dev/null +++ b/test/parallel/test-buffer-readuintle.js @@ -0,0 +1,24 @@ +'use strict'; +require('../common'); +const assert = require('assert'); + +// testing basic functionality of readUIntLE() + +const buf = Buffer.from([42, 84, 168, 127]); +const result = buf.readUIntLE(2); + +assert.strictEqual(result, 168); + +assert.throws( + () => { + buf.readUIntLE(5); + }, + /Index out of range/ +); + +assert.doesNotThrow( + () => { + buf.readUIntLE(5, 0, true); + }, + 'readUIntLE() should not throw if noAssert is true' +); From 6f8c6133e3406db2ac794ed0a664c5ec0b701ecf Mon Sep 17 00:00:00 2001 From: sarahmeyer Date: Thu, 1 Dec 2016 14:03:12 -0600 Subject: [PATCH 065/144] doc: update CONTRIBUTING.MD with link to V8 guide MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Currently, two of the guides in the `/doc/guides` directory are actually guides for working on the Nodei.js project. Of those, one is linked from this page. This change adds a note to point people to the other. PR-URL: https://github.com/nodejs/node/pull/10070 Reviewed-By: Michaël Zasso Reviewed-By: James M Snell Reviewed-By: Myles Borins --- CONTRIBUTING.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b320dc21a3ed5b..6d419aa4954fde 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -60,16 +60,21 @@ always welcome but API or behavioral changes to modules at stability level 3 Node.js has several bundled dependencies in the *deps/* and the *tools/* directories that are not part of the project proper. Any changes to files in those directories or its subdirectories should be sent to their respective -projects. Do not send your patch to us, we cannot accept it. +projects. Do not send a patch to Node.js. We cannot accept such patches. In case of doubt, open an issue in the [issue tracker](https://github.com/nodejs/node/issues/) or contact one of the [project Collaborators](https://github.com/nodejs/node/#current-project-team-members). Especially do so if you plan to work on something big. Nothing is more frustrating than seeing your hard work go to waste because your vision -does not align with the project team. 
Node.js has two IRC channels, -[#Node.js](http://webchat.freenode.net/?channels=node.js) for general help and questions, and -[#Node-dev](http://webchat.freenode.net/?channels=node-dev) for development of node core specifically. +does not align with the project team. (Node.js has two IRC channels: +[#Node.js](http://webchat.freenode.net/?channels=node.js) for general help and +questions, and +[#Node-dev](http://webchat.freenode.net/?channels=node-dev) for development of +Node.js core specifically. + +For instructions on updating the version of V8 included in the *deps/* +directory, please refer to [the Maintaining V8 in Node.js guide](https://github.com/nodejs/node/blob/master/doc/guides/maintaining-V8.md). ### Step 2: Branch From 37cb971c652ef644d544e34b4688ab773454300e Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:28:33 +0200 Subject: [PATCH 066/144] doc: var => let / const in repl.md PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index b78a13544add4a..190186ed696cfa 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -78,15 +78,15 @@ The default evaluator supports direct evaluation of JavaScript expressions: ```js > 1 + 1 2 -> var m = 2 +> const m = 2 undefined > m + 1 3 ``` -Unless otherwise scoped within blocks (e.g. `{ ... }`) or functions, variables -declared either implicitly or using the `var` keyword are declared at the -`global` scope. +Unless otherwise scoped within blocks or functions, variables declared +either implicitly, or using the `const`, `let`, or `var` keywords +are declared at the global scope. #### Global and Local Scope @@ -96,7 +96,7 @@ it to the `context` object associated with each `REPLServer`. For example: ```js const repl = require('repl'); -var msg = 'message'; +const msg = 'message'; repl.start('> ').context.m = msg; ``` @@ -115,7 +115,7 @@ To specify read-only globals, context properties must be defined using ```js const repl = require('repl'); -var msg = 'message'; +const msg = 'message'; const r = repl.start('> '); Object.defineProperty(r.context, 'm', { @@ -183,7 +183,7 @@ to the provided callback function: ```js function eval(cmd, context, filename, callback) { - var result; + let result; try { result = vm.runInThisContext(cmd); } catch (e) { @@ -275,7 +275,7 @@ function initializeContext(context) { context.m = 'test'; } -var r = repl.start({prompt: '>'}); +const r = repl.start({prompt: '>'}); initializeContext(r.context); r.on('reset', initializeContext); @@ -321,7 +321,7 @@ The following example shows two new commands added to the REPL instance: ```js const repl = require('repl'); -var replServer = repl.start({prompt: '> '}); +const replServer = repl.start({prompt: '> '}); replServer.defineCommand('sayhello', { help: 'Say hello', action: function(name) { @@ -421,7 +421,7 @@ without passing any arguments (or by passing the `-i` argument): ```js $ node -> a = [1, 2, 3]; +> const a = [1, 2, 3]; [ 1, 2, 3 ] > a.forEach((v) => { ... 
console.log(v); @@ -493,7 +493,7 @@ socket, and a TCP socket: ```js const net = require('net'); const repl = require('repl'); -var connections = 0; +let connections = 0; repl.start({ prompt: 'Node.js via stdin> ', From ae61232493fafb48865d99678f517d4081ab3401 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:34:49 +0200 Subject: [PATCH 067/144] doc: white space unification in repl.md Add an infix space in an argument list. Change `>` into `> ` in code bits and output examples. Explicitly clarify that default REPL prompt contains a trailing space. PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index 190186ed696cfa..a35812ca47b957 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -217,10 +217,10 @@ following example, for instance, simply converts any input text to upper case: ```js const repl = require('repl'); -const r = repl.start({prompt: '>', eval: myEval, writer: myWriter}); +const r = repl.start({prompt: '> ', eval: myEval, writer: myWriter}); function myEval(cmd, context, filename, callback) { - callback(null,cmd); + callback(null, cmd); } function myWriter(output) { @@ -275,7 +275,7 @@ function initializeContext(context) { context.m = 'test'; } -const r = repl.start({prompt: '>'}); +const r = repl.start({prompt: '> '}); initializeContext(r.context); r.on('reset', initializeContext); @@ -286,15 +286,15 @@ reset to its initial value using the `.clear` command: ```js $ ./node example.js ->m +> m 'test' ->m = 1 +> m = 1 1 ->m +> m 1 ->.clear +> .clear Clearing context... ->m +> m 'test' > ``` @@ -371,8 +371,9 @@ within the action function for commands registered using the added: v0.1.91 --> -* `options` {Object} - * `prompt` {String} The input prompt to display. Defaults to `> `. +* `options` {Object | String} + * `prompt` {String} The input prompt to display. Defaults to `> ` + (with a trailing space). * `input` {Readable} The Readable stream from which REPL input will be read. Defaults to `process.stdin`. * `output` {Writable} The Writable stream to which REPL output will be From b8e2711ddd4e349d307b41312b1089eb5802caa0 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:39:28 +0200 Subject: [PATCH 068/144] doc: fix an output example in repl.md Make `_` reassignment example match more with the current output. Extend the example for more clarity. PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index a35812ca47b957..055e5385c18460 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -140,6 +140,7 @@ global or scoped variable, the input `fs` will be evaluated on-demand as The default evaluator will, by default, assign the result of the most recently evaluated expression to the special variable `_` (underscore). +Explicitly setting `_` to a value will disable this behavior. ```js > [ 'a', 'b', 'c' ] @@ -147,11 +148,14 @@ evaluated expression to the special variable `_` (underscore). > _.length 3 > _ += 1 +Expression assignment to _ now disabled. +4 +> 1 + 1 +2 +> _ 4 ``` -Explicitly setting `_` to a value will disable this behavior. 
- ### Custom Evaluation Functions When a new `repl.REPLServer` is created, a custom evaluation function may be From f281b190d5c8b8482fa9b59410021177e635cb8a Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:41:18 +0200 Subject: [PATCH 069/144] doc: fix a function name in repl.md `eval` => `myEval` to not shadow the global `eval` PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index 055e5385c18460..cd4c3db9606711 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -186,7 +186,7 @@ multi-line input, the eval function can return an instance of `repl.Recoverable` to the provided callback function: ```js -function eval(cmd, context, filename, callback) { +function myEval(cmd, context, filename, callback) { let result; try { result = vm.runInThisContext(cmd); From cffbfba4df31f546261a65301ee842cbff323d19 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:43:37 +0200 Subject: [PATCH 070/144] doc: replace anonymous functions in repl.md Replaced with an object shorthand and an arrow function. PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index cd4c3db9606711..268aa70c748006 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -328,14 +328,14 @@ const repl = require('repl'); const replServer = repl.start({prompt: '> '}); replServer.defineCommand('sayhello', { help: 'Say hello', - action: function(name) { + action(name) { this.lineParser.reset(); this.bufferedCommand = ''; console.log(`Hello, ${name}!`); this.displayPrompt(); } }); -replServer.defineCommand('saybye', function() { +replServer.defineCommand('saybye', () => { console.log('Goodbye!'); this.close(); }); From 9ce28ec3c515cbff212d9e79e54b092d48325b3b Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Tue, 13 Dec 2016 01:46:10 +0200 Subject: [PATCH 071/144] doc: add the valid link for curl(1) in repl.md The current autoinserted link leads to 404 page. PR-URL: https://github.com/nodejs/node/pull/10244 Reviewed-By: Anna Henningsen Reviewed-By: Sakthipriyan Vairamani Reviewed-By: James M Snell --- doc/api/repl.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/doc/api/repl.md b/doc/api/repl.md index 268aa70c748006..eb32f2c2b2755b 100644 --- a/doc/api/repl.md +++ b/doc/api/repl.md @@ -85,7 +85,7 @@ undefined ``` Unless otherwise scoped within blocks or functions, variables declared -either implicitly, or using the `const`, `let`, or `var` keywords +either implicitly or using the `const`, `let`, or `var` keywords are declared at the global scope. #### Global and Local Scope @@ -540,10 +540,11 @@ possible to connect to a long-running Node.js process without restarting it. 
For an example of running a "full-featured" (`terminal`) REPL over a `net.Server` and `net.Socket` instance, see: https://gist.github.com/2209310 -For an example of running a REPL instance over curl(1), +For an example of running a REPL instance over [curl(1)][], see: https://gist.github.com/2053342 [stream]: stream.html [`util.inspect()`]: util.html#util_util_inspect_object_options [`readline.Interface`]: readline.html#readline_class_interface [`readline.InterfaceCompleter`]: readline.html#readline_use_of_the_completer_function +[curl(1)]: https://curl.haxx.se/docs/manpage.html From 94a894acf2a1707223bd4bb723622f7c227dc38e Mon Sep 17 00:00:00 2001 From: "Sakthipriyan Vairamani (thefourtheye)" Date: Wed, 21 Dec 2016 07:44:05 +0530 Subject: [PATCH 072/144] test: fix and improve debugger-client test This test expects the string 'Debugger listening on port' on stderr and since the message has been changed to 'Debugger listening on host:port' this was failing always. Apart from that, this test expects the main script's name to be `src/node.js`, but that has been renamed to `lib/internal/node.js` and then to `lib/internal/bootstrap_node.js`. So, the script name has been updated. Apart from that, using `const` instead of `var` wherever possible. Refer: https://github.com/nodejs/node/issues/10361 PR-URL: https://github.com/nodejs/node/pull/10371 Reviewed-By: Colin Ihrig Reviewed-By: Sam Roberts Reviewed-By: Rich Trott Reviewed-By: James M Snell --- test/debugger/test-debugger-client.js | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/test/debugger/test-debugger-client.js b/test/debugger/test-debugger-client.js index 743a3c4352e2e1..603a6bdbd1c627 100644 --- a/test/debugger/test-debugger-client.js +++ b/test/debugger/test-debugger-client.js @@ -16,7 +16,7 @@ setTimeout(function() { let resCount = 0; const p = new debug.Protocol(); -p.onResponse = function(res) { +p.onResponse = function() { resCount++; }; @@ -89,7 +89,7 @@ function addTest(cb) { addTest(function(client, done) { console.error('requesting version'); client.reqVersion(function(err, v) { - assert.ok(!err); + assert.ifError(err); console.log('version: %s', v); assert.strictEqual(process.versions.v8, v); done(); @@ -99,13 +99,13 @@ addTest(function(client, done) { addTest(function(client, done) { console.error('requesting scripts'); client.reqScripts(function(err) { - assert.ok(!err); + assert.ifError(err); console.error('got %d scripts', Object.keys(client.scripts).length); let foundMainScript = false; for (const k in client.scripts) { const script = client.scripts[k]; - if (script && script.name === 'node.js') { + if (script && script.name === 'bootstrap_node.js') { foundMainScript = true; break; } @@ -119,7 +119,7 @@ addTest(function(client, done) { console.error('eval 2+2'); client.reqEval('2+2', function(err, res) { console.error(res); - assert.ok(!err); + assert.ifError(err); assert.strictEqual(res.text, '4'); assert.strictEqual(res.value, 4); done(); @@ -137,7 +137,7 @@ function doTest(cb, done) { const args = ['--debug=' + debugPort, '-e', script]; nodeProcess = spawn(process.execPath, args); - nodeProcess.stdout.once('data', function(c) { + nodeProcess.stdout.once('data', function() { console.log('>>> new node process: %d', nodeProcess.pid); let failed = true; try { @@ -158,7 +158,7 @@ function doTest(cb, done) { console.error('got stderr data %j', data); nodeProcess.stderr.resume(); b += data; - if (didTryConnect === false && b.match(/Debugger listening on port/)) { + if (didTryConnect 
=== false && b.match(/Debugger listening on /)) { didTryConnect = true; // The timeout is here to expose a race in the bootstrap process. @@ -168,10 +168,10 @@ function doTest(cb, done) { function tryConnect() { // Wait for some data before trying to connect - var c = new debug.Client(); + const c = new debug.Client(); console.error('>>> connecting...'); c.connect(debug.port); - c.on('break', function(brk) { + c.on('break', function() { c.reqContinue(function() {}); }); c.on('ready', function() { @@ -199,7 +199,7 @@ function doTest(cb, done) { function run() { - var t = tests[0]; + const t = tests[0]; if (!t) return; doTest(t, function() { From cdf028c5a63f39777c1f9b6e2392c7e5a25c2a9f Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Fri, 23 Dec 2016 14:01:17 -0500 Subject: [PATCH 073/144] test: improve code in test-vm-symbols * use const instead of var * use assert.strictEqual instead of assert.equal PR-URL: https://github.com/nodejs/node/pull/10429 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- test/parallel/test-vm-symbols.js | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/parallel/test-vm-symbols.js b/test/parallel/test-vm-symbols.js index f80609c0101d51..0867c7253dcec4 100644 --- a/test/parallel/test-vm-symbols.js +++ b/test/parallel/test-vm-symbols.js @@ -1,11 +1,11 @@ 'use strict'; require('../common'); -var assert = require('assert'); +const assert = require('assert'); -var vm = require('vm'); +const vm = require('vm'); -var symbol = Symbol(); +const symbol = Symbol(); function Document() { this[symbol] = 'foo'; @@ -15,11 +15,11 @@ Document.prototype.getSymbolValue = function() { return this[symbol]; }; -var context = new Document(); +const context = new Document(); vm.createContext(context); -assert.equal(context.getSymbolValue(), 'foo', - 'should return symbol-keyed value from the outside'); +assert.strictEqual(context.getSymbolValue(), 'foo', + 'should return symbol-keyed value from the outside'); -assert.equal(vm.runInContext('this.getSymbolValue()', context), 'foo', - 'should return symbol-keyed value from the inside'); +assert.strictEqual(vm.runInContext('this.getSymbolValue()', context), 'foo', + 'should return symbol-keyed value from the inside'); From 98fcb221d544c194f0545502aa279443b2a11875 Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Fri, 23 Dec 2016 13:44:10 -0500 Subject: [PATCH 074/144] test: improve code in test-vm-preserves-property * use const instead of var * use assert.strictEqual instead assert.equal PR-URL https://github.com/nodejs/node/pull/10428 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- test/parallel/test-vm-preserves-property.js | 22 ++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/test/parallel/test-vm-preserves-property.js b/test/parallel/test-vm-preserves-property.js index 9786ee54a7cffd..0ddc7b260a1393 100644 --- a/test/parallel/test-vm-preserves-property.js +++ b/test/parallel/test-vm-preserves-property.js @@ -1,25 +1,25 @@ 'use strict'; require('../common'); -var assert = require('assert'); +const assert = require('assert'); -var vm = require('vm'); +const vm = require('vm'); -var x = {}; +const x = {}; Object.defineProperty(x, 'prop', { configurable: false, enumerable: false, writable: false, value: 'val' }); -var o = vm.createContext(x); +const o = vm.createContext(x); -var code = 'Object.getOwnPropertyDescriptor(this, "prop")'; -var res = vm.runInContext(code, o, 'test'); +const code = 
'Object.getOwnPropertyDescriptor(this, "prop")'; +const res = vm.runInContext(code, o, 'test'); assert(res); -assert.equal(typeof res, 'object'); -assert.equal(res.value, 'val'); -assert.equal(res.configurable, false, 'should not be configurable'); -assert.equal(res.enumerable, false, 'should not be enumerable'); -assert.equal(res.writable, false, 'should not be writable'); +assert.strictEqual(typeof res, 'object'); +assert.strictEqual(res.value, 'val'); +assert.strictEqual(res.configurable, false, 'should not be configurable'); +assert.strictEqual(res.enumerable, false, 'should not be enumerable'); +assert.strictEqual(res.writable, false, 'should not be writable'); From 011bd4675ab785beef2e41b4915eabc565d0955d Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Tue, 20 Dec 2016 19:27:56 -0500 Subject: [PATCH 075/144] test: improve code in test-fs-readfile-error * use const instead of var * use common.mustCall to control the functions execution automatically * use assert.strictEqual instead of assert.equal * use assert.notStrictEqual instead of assert.notEqual * use arrow functions PR-URL: https://github.com/nodejs/node/pull/10367 Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- test/parallel/test-fs-readfile-error.js | 26 ++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/test/parallel/test-fs-readfile-error.js b/test/parallel/test-fs-readfile-error.js index 86a2be4e1bc020..827c84d7b7093c 100644 --- a/test/parallel/test-fs-readfile-error.js +++ b/test/parallel/test-fs-readfile-error.js @@ -1,8 +1,8 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var exec = require('child_process').exec; -var path = require('path'); +const common = require('../common'); +const assert = require('assert'); +const exec = require('child_process').exec; +const path = require('path'); // `fs.readFile('/')` does not fail on FreeBSD, because you can open and read // the directory there. 
@@ -12,23 +12,23 @@ if (process.platform === 'freebsd') { } function test(env, cb) { - var filename = path.join(common.fixturesDir, 'test-fs-readfile-error.js'); - var execPath = '"' + process.execPath + '" "' + filename + '"'; - var options = { env: Object.assign(process.env, env) }; - exec(execPath, options, function(err, stdout, stderr) { + const filename = path.join(common.fixturesDir, 'test-fs-readfile-error.js'); + const execPath = '"' + process.execPath + '" "' + filename + '"'; + const options = { env: Object.assign(process.env, env) }; + exec(execPath, options, common.mustCall((err, stdout, stderr) => { assert(err); - assert.equal(stdout, ''); - assert.notEqual(stderr, ''); + assert.strictEqual(stdout, ''); + assert.notStrictEqual(stderr, ''); cb('' + stderr); - }); + })); } -test({ NODE_DEBUG: '' }, common.mustCall(function(data) { +test({ NODE_DEBUG: '' }, common.mustCall((data) => { assert(/EISDIR/.test(data)); assert(!/test-fs-readfile-error/.test(data)); })); -test({ NODE_DEBUG: 'fs' }, common.mustCall(function(data) { +test({ NODE_DEBUG: 'fs' }, common.mustCall((data) => { assert(/EISDIR/.test(data)); assert(/test-fs-readfile-error/.test(data)); })); From f6ed2335468a6d93917cc34ae290f25d6524a5b1 Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Tue, 27 Dec 2016 12:12:07 -0500 Subject: [PATCH 076/144] test: improve the code in test-pipe.js * use const and let instead of var * use common.mustCall to control functions executions * use assert.strictEqual instead of assert.equal * use assert.ifError to handle errors * use arrow functions * remove console.log and process.stdout.write PR-URL: https://github.com/nodejs/node/pull/10452 Reviewed-By: James M Snell Reviewed-By: Luigi Pinca Reviewed-By: Brian White --- test/sequential/test-pipe.js | 94 +++++++++++++++--------------------- 1 file changed, 40 insertions(+), 54 deletions(-) diff --git a/test/sequential/test-pipe.js b/test/sequential/test-pipe.js index 36d40100a7fec6..75154bcdf5f360 100644 --- a/test/sequential/test-pipe.js +++ b/test/sequential/test-pipe.js @@ -1,110 +1,96 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var http = require('http'); -var net = require('net'); +const common = require('../common'); +const assert = require('assert'); +const http = require('http'); +const net = require('net'); -var webPort = common.PORT; -var tcpPort = webPort + 1; +const webPort = common.PORT; +const tcpPort = webPort + 1; +const bufferSize = 5 * 1024 * 1024; -var listenCount = 0; -var gotThanks = false; -var tcpLengthSeen = 0; -var bufferSize = 5 * 1024 * 1024; +let listenCount = 0; +let gotThanks = false; +let tcpLengthSeen = 0; /* * 5MB of random buffer. 
*/ -var buffer = Buffer.allocUnsafe(bufferSize); -for (var i = 0; i < buffer.length; i++) { +const buffer = Buffer.allocUnsafe(bufferSize); +for (let i = 0; i < buffer.length; i++) { buffer[i] = parseInt(Math.random() * 10000) % 256; } -var web = http.Server(function(req, res) { +const web = http.Server(common.mustCall((req, res) => { web.close(); - console.log(req.headers); - - var socket = net.Stream(); + const socket = net.Stream(); socket.connect(tcpPort); - socket.on('connect', function() { - console.log('socket connected'); - }); + socket.on('connect', common.mustCall(() => {})); req.pipe(socket); - req.on('end', function() { + req.on('end', common.mustCall(() => { res.writeHead(200); res.write('thanks'); res.end(); - console.log('response with \'thanks\''); - }); + })); - req.connection.on('error', function(e) { - console.log('http server-side error: ' + e.message); - process.exit(1); + req.connection.on('error', (e) => { + assert.ifError(e); }); -}); +})); + web.listen(webPort, startClient); -var tcp = net.Server(function(s) { +const tcp = net.Server(common.mustCall((s) => { tcp.close(); - console.log('tcp server connection'); - - var i = 0; + let i = 0; - s.on('data', function(d) { - process.stdout.write('.'); + s.on('data', (d) => { tcpLengthSeen += d.length; - for (var j = 0; j < d.length; j++) { - assert.equal(buffer[i], d[j]); + for (let j = 0; j < d.length; j++) { + assert.strictEqual(buffer[i], d[j]); i++; } }); - s.on('end', function() { - console.log('tcp socket disconnect'); + s.on('end', common.mustCall(() => { s.end(); - }); + })); - s.on('error', function(e) { - console.log('tcp server-side error: ' + e.message); - process.exit(1); + s.on('error', (e) => { + assert.ifError(e); }); -}); -tcp.listen(tcpPort, startClient); +})); +tcp.listen(tcpPort, startClient); function startClient() { listenCount++; if (listenCount < 2) return; - console.log('Making request'); - - var req = http.request({ + const req = http.request({ port: common.PORT, method: 'GET', path: '/', headers: { 'content-length': buffer.length } - }, function(res) { - console.log('Got response'); + }, common.mustCall((res) => { res.setEncoding('utf8'); - res.on('data', function(string) { - assert.equal('thanks', string); + res.on('data', common.mustCall((string) => { + assert.strictEqual('thanks', string); gotThanks = true; - }); - }); + })); + })); req.write(buffer); req.end(); - console.error('ended request', req); } -process.on('exit', function() { +process.on('exit', () => { assert.ok(gotThanks); - assert.equal(bufferSize, tcpLengthSeen); + assert.strictEqual(bufferSize, tcpLengthSeen); }); From 3d181ce4fd27b421311c4e335b14575805679ec2 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Sun, 25 Dec 2016 19:57:12 +0200 Subject: [PATCH 077/144] doc: var -> const / let in the console.md PR-URL: https://github.com/nodejs/node/pull/10451 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- doc/api/console.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/api/console.md b/doc/api/console.md index e5d3957e5087dd..98a05d004f57c5 100644 --- a/doc/api/console.md +++ b/doc/api/console.md @@ -88,7 +88,7 @@ const errorOutput = fs.createWriteStream('./stderr.log'); // custom simple logger const logger = new Console(output, errorOutput); // use it like console -var count = 5; +const count = 5; logger.log('count: %d', count); // in stdout.log: count 5 ``` @@ -217,7 +217,7 @@ values similar to printf(3) (the arguments are all passed to [`util.format()`][]). 
```js -var count = 5; +const count = 5; console.log('count: %d', count); // Prints: count: 5, to stdout console.log('count:', count); @@ -248,7 +248,7 @@ prints the result to `stdout`: ```js console.time('100-elements'); -for (var i = 0; i < 100; i++) { +for (let i = 0; i < 100; i++) { ; } console.timeEnd('100-elements'); From aabaef0aa7cc4cc80922c248d2388a8295b31e45 Mon Sep 17 00:00:00 2001 From: Vse Mozhet Byt Date: Sun, 25 Dec 2016 20:04:45 +0200 Subject: [PATCH 078/144] doc: more efficient example in the console.md Object.setPrototypeOf() -> Object.create() PR-URL: https://github.com/nodejs/node/pull/10451 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- doc/api/console.md | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/doc/api/console.md b/doc/api/console.md index 98a05d004f57c5..70b452bd84070f 100644 --- a/doc/api/console.md +++ b/doc/api/console.md @@ -135,15 +135,20 @@ the default behavior of `console` in Node.js. // Creates a simple extension of console with a // new impl for assert without monkey-patching. -const myConsole = Object.setPrototypeOf({ - assert(assertion, message, ...args) { - try { - console.assert(assertion, message, ...args); - } catch (err) { - console.error(err.stack); - } - } -}, console); +const myConsole = Object.create(console, { + assert: { + value: function assert(assertion, message, ...args) { + try { + console.assert(assertion, message, ...args); + } catch (err) { + console.error(err.stack); + } + }, + configurable: true, + enumerable: true, + writable: true, + }, +}); module.exports = myConsole; ``` From 89fb82214fcf6841a6d94425d05387125715b242 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 24 Dec 2016 10:32:47 -0800 Subject: [PATCH 079/144] doc: use "Node.js" in V8 guide MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/10438 Reviewed-By: Michaël Zasso Reviewed-By: Gibson Fahnestock Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- doc/guides/maintaining-V8.md | 121 +++++++++++++++++++++++++---------- 1 file changed, 86 insertions(+), 35 deletions(-) diff --git a/doc/guides/maintaining-V8.md b/doc/guides/maintaining-V8.md index 82312cf6bcf4d1..19ee61fe20c5d7 100644 --- a/doc/guides/maintaining-V8.md +++ b/doc/guides/maintaining-V8.md @@ -1,10 +1,17 @@ -# Maintaining V8 in Node +# Maintaining V8 in Node.js # Background -V8 follows the Chromium release schedule. The support horizon for Chromium is very different from the support horizon that Node.js needs to provide to its users. As a result Node.js needs to support a version of V8 for quite a bit longer than what upstream needs to support. Since V8 doesn't have an LTS supported branch, there is no official process around how the V8 branches in Node are maintained. +V8 follows the Chromium release schedule. The support horizon for Chromium is +very different from the support horizon that Node.js needs to provide to its +users. As a result Node.js needs to support a version of V8 for quite a bit +longer than what upstream needs to support. Since V8 doesn't have an LTS +supported branch, there is no official process around how the V8 branches in +Node.js are maintained. -This document attempts to document the current processes and proposes a workflow for maintaining the V8 branches in Node.js LTS and Current releases and how the Node and V8 teams at Google can help. 
+This document attempts to document the current processes and proposes a workflow +for maintaining the V8 branches in Node.js LTS and Current releases and how the +Node.js and V8 teams at Google can help. # V8 Release Schedule @@ -16,7 +23,8 @@ For example, at the time of this writing: * **Beta**: V8 5.5 is currently in beta. It will be promoted to stable next; approximately 6 weeks after V8 5.4 shipped as stable. * **Master**: V8 tip-of-tree corresponds to V8 5.6. This branch gets regularly released as part of the Chromium **canary** builds. This branch will be promoted to beta next when V8 5.5 ships as stable. -All older branches are considered **abandoned**, and are not maintained by the V8 team. +All older branches are considered **abandoned**, and are not maintained by the +V8 team. ## V8 merge process overview @@ -29,9 +37,11 @@ The process for backporting bug fixes to active branches is officially documente * Merge requests to an abandoned branch will be rejected. * Only bug fixes are accepted for backporting. -# Node Support Requirements +# Node.js Support Requirements -At any given time Node needs to be maintaining a few different V8 branches for the various Current, LTS, and nightly releases. At present this list includes the following branches1: +At any given time Node.js needs to be maintaining a few different V8 branches +for the various Current, LTS, and nightly releases. At present this list +includes the following branches1: @@ -49,7 +59,7 @@ At any given time Node needs to be maintaining a few different V8 branches for t - @@ -63,7 +73,7 @@ At any given time Node needs to be maintaining a few different V8 branches for t - @@ -77,7 +87,7 @@ At any given time Node needs to be maintaining a few different V8 branches for t - @@ -107,17 +117,22 @@ At any given time Node needs to be maintaining a few different V8 branches for t
Node v4.x + Node.js v4.x 2015-10-01
Node v6.x + Node.js v6.x 2016-04-01
Node v7.x + Node.js v7.x 2016-10-01
-The versions of V8 used in Node v4.x and v6.x have already been abandoned by upstream V8. However, Node.js needs to continue supporting these branches for many months (Current branches) or several years (LTS branches). +The versions of V8 used in Node.js v4.x and v6.x have already been abandoned by +upstream V8. However, Node.js needs to continue supporting these branches for +many months (Current branches) or several years (LTS branches). # Maintenance Process -Once a bug in Node.js has been identified to be caused by V8, the first step is to identify the versions of Node and V8 affected. The bug may be present in multiple different locations, each of which follows a slightly different process. +Once a bug in Node.js has been identified to be caused by V8, the first step is +to identify the versions of Node.js and V8 affected. The bug may be present in +multiple different locations, each of which follows a slightly different +process. * Unfixed bugs. The bug exists in the V8 master branch. * Fixed, but needs backport. The bug may need porting to one or more branches. * Backporting to active branches. * Backporting to abandoned branches. -* Backports identified by the V8 team. Bugs identified by upstream V8 that we haven't encountered in Node yet. +* Backports identified by the V8 team. Bugs identified by upstream V8 that we haven't encountered in Node.js yet. ## Unfixed Upstream Bugs @@ -127,7 +142,7 @@ If the bug can be reproduced on the [`vee-eight-lkgr` branch](https://github.com * Make sure to include a link to the corresponding Node.js issue (if one exists). * If the fix is simple enough, you may fix it yourself; [contributions](https://github.com/v8/v8/wiki/Contributing) are welcome. * V8's build waterfall tests your change. -* Once the bug is fixed it may still need backporting, if it exists in other V8 branches that are still active or are branches that Node cares about. Follow the process for backporting below. +* Once the bug is fixed it may still need backporting, if it exists in other V8 branches that are still active or are branches that Node.js cares about. Follow the process for backporting below. ## Backporting to Active Branches @@ -142,23 +157,28 @@ If the bug exists in any of the active V8 branches, we may need to get the fix b * Attach *merge-request-x.x* labels to the bug for any active branches that still contain the bug. (e.g. merge-request-5.3, merge-request-5.4) * Add ofrobots-at-google.com to the cc list. * Once the merge has been approved, it should be merged using the [merge script documented in the V8 wiki](https://github.com/v8/v8/wiki/Merging%20&%20Patching). Merging requires commit access to the V8 repository. If you don't have commit access you can indicate someone on the V8 team can do the merge for you. -* It is possible that the merge request may not get approved, for example if it is considered to be a feature or otherwise too risky for V8 stable. In such cases we float the patch on the Node side. See the process on 'Backporting to Abandoned branches'. +* It is possible that the merge request may not get approved, for example if it is considered to be a feature or otherwise too risky for V8 stable. In such cases we float the patch on the Node.js side. See the process on 'Backporting to Abandoned branches'. * Once the fix has been merged upstream, it can be picked up during an update of the V8 branch, (see below). ## Backporting to Abandoned Branches -Abandoned V8 branches are supported in the Node.js V8 repository. 
The fix needs to be cherry-picked in the Node.js repository and V8-CI must test the change. +Abandoned V8 branches are supported in the Node.js V8 repository. The fix needs +to be cherry-picked in the Node.js repository and V8-CI must test the change. * For each abandoned V8 branch corresponding to an LTS branch that is affected by the bug: * Open a cherry-pick PR on nodejs/node targeting the appropriate *vY.x-staging* branch (e.g. *v6.x-staging* to fix an issue in V8-5.1). * Increase the patch level version in v8-version.h. This will not cause any problems with versioning because V8 will not publish other patches for this branch, so Node.js can effectively bump the patch version. * In some cases the patch may require extra effort to merge in case V8 has changed substantially. For important issues we may be able to lean on the V8 team to get help with reimplementing the patch. - * Run Node's [V8-CI](https://ci.nodejs.org/job/node-test-commit-v8-linux/) in addition to the [Node CI](https://ci.nodejs.org/job/node-test-pull-request/). + * Run the Node.js [V8-CI](https://ci.nodejs.org/job/node-test-commit-v8-linux/) in addition to the [Node.js CI](https://ci.nodejs.org/job/node-test-pull-request/). -An example for workflow how to cherry-pick consider the following bug: https://crbug.com/v8/5199. From the bug we can see that it was merged by V8 into 5.2 and 5.3, and not into V8 5.1 (since it was already abandoned). Since Node.js `v6.x` uses V8 5.1, the fix needed to cherry-picked. To cherry-pick, here's an example workflow: +An example for workflow how to cherry-pick consider the following bug: +https://crbug.com/v8/5199. From the bug we can see that it was merged by V8 into +5.2 and 5.3, and not into V8 5.1 (since it was already abandoned). Since Node.js +`v6.x` uses V8 5.1, the fix needed to cherry-picked. To cherry-pick, here's an +example workflow: * Download and apply the commit linked-to in the issue (in this case a51f429). `curl -L https://github.com/v8/v8/commit/a51f429.patch | git apply --directory=deps/v8`. If the branches have diverged significantly, this may not apply cleanly. It may help to try to cherry-pick the merge to the oldest branch that was done upstream in V8. In this example, this would be the patch from the merge to 5.2. The hope is that this would be closer to the V8 5.1, and has a better chance of applying cleanly. If you're stuck, feel free to ping @ofrobots for help. -* Modify the commit message to match the format we use for V8 backports. You may want to add extra description if necessary to indicate the impact of the fix on Node. In this case the original issue was descriptive enough. Example: +* Modify the commit message to match the format we use for V8 backports. You may want to add extra description if necessary to indicate the impact of the fix on Node.js. In this case the original issue was descriptive enough. Example: ``` deps: cherry-pick a51f429 from V8 upstream @@ -182,24 +202,38 @@ PR-URL: ## Backports Identified by the V8 team -For bugs found through the browser or other channels, the V8 team marks bugs that might be applicable to the abandoned branches in use by Node.js. This is done through manual tagging by the V8 team and through an automated process that tags any fix that gets backported to the stable branch (as it is likely candidate for backporting further). +For bugs found through the browser or other channels, the V8 team marks bugs +that might be applicable to the abandoned branches in use by Node.js. 
This is +done through manual tagging by the V8 team and through an automated process that +tags any fix that gets backported to the stable branch (as it is likely +candidate for backporting further). Such fixes are tagged with the following labels in the V8 issue tracker: -* `NodeJS-Backport-Review` ([V8](https://bugs.chromium.org/p/v8/issues/list?can=1&q=label%3ANodeJS-Backport-Review), [Chromium](https://bugs.chromium.org/p/chromium/issues/list?can=1&q=label%3ANodeJS-Backport-Review)): to be reviewed if this is applicable to abandoned branches in use by Node.js. This list if regularly reviewed by the node team at Google to determine applicability to Node.js. +* `NodeJS-Backport-Review` ([V8](https://bugs.chromium.org/p/v8/issues/list?can=1&q=label%3ANodeJS-Backport-Review), [Chromium](https://bugs.chromium.org/p/chromium/issues/list?can=1&q=label%3ANodeJS-Backport-Review)): to be reviewed if this is applicable to abandoned branches in use by Node.js. This list if regularly reviewed by the Node.js team at Google to determine applicability to Node.js. * `NodeJS-Backport-Approved` ([V8](https://bugs.chromium.org/p/v8/issues/list?can=1&q=label%3ANodeJS-Backport-Approved), [Chromium](https://bugs.chromium.org/p/chromium/issues/list?can=1&q=label%3ANodeJS-Backport-Approved)): marks bugs that are deemed relevant to Node.js and should be backported. * `NodeJS-Backport-Done` ([V8](https://bugs.chromium.org/p/v8/issues/list?can=1&q=label%3ANodeJS-Backport-Done), [Chromium](https://bugs.chromium.org/p/chromium/issues/list?can=1&q=label%3ANodeJS-Backport-Done)): Backport for Node.js has been performed already. * `NodeJS-Backport-Rejected` ([V8](https://bugs.chromium.org/p/v8/issues/list?can=1&q=label%3ANodeJS-Backport-Rejected), [Chromium](https://bugs.chromium.org/p/chromium/issues/list?can=1&q=label%3ANodeJS-Backport-Rejected)): Backport for Node.js is not desired. -The backlog of issues with such is regularly reviewed by the node-team at Google to shepherd through the backport process. External contributors are welcome to collaborate on the backport process as well. Note that some of the bugs may be security issues and will not be visible to external collaborators. +The backlog of issues with such is regularly reviewed by the node-team at Google +to shepherd through the backport process. External contributors are welcome to +collaborate on the backport process as well. Note that some of the bugs may be +security issues and will not be visible to external collaborators. # Updating V8 -Node keeps a vendored copy of V8 inside of deps/ directory. In addition Node may need to float patches that do not exist upstream. This means that some care may need to be taken to update the vendored copy of V8. +Node.js keeps a vendored copy of V8 inside of deps/ directory. In addition +Node.js may need to float patches that do not exist upstream. This means that +some care may need to be taken to update the vendored copy of V8. ## Minor updates (patch level) -Because there may be floating patches on the version of V8 in Node.js, it is safest to apply the patch level updates as a patch. For example, imagine that upstream V8 is at 5.0.71.47 and Node.js is at 5.0.71.32. It would be best to compute the diff between these tags on the V8 repository, and then apply that patch on the copy of V8 in Node.js. This should preserve the patches/backports that Node.js may be floating (or else cause a merge conflict). 
+Because there may be floating patches on the version of V8 in Node.js, it is +safest to apply the patch level updates as a patch. For example, imagine that +upstream V8 is at 5.0.71.47 and Node.js is at 5.0.71.32. It would be best to +compute the diff between these tags on the V8 repository, and then apply that +patch on the copy of V8 in Node.js. This should preserve the patches/backports +that Node.js may be floating (or else cause a merge conflict). The rough outline of the process is: @@ -216,14 +250,17 @@ curl -L https://github.com/v8/v8/compare/${V8_OLD_VERSION}...${V8_NEW_VERSION}.p # You may want to amend the commit message to describe the nature of the update ``` -V8 also keeps tags of the form *5.4-lkgr* which point to the *Last Known Good Revision* from the 5.4 branch that can be useful in the update process above. +V8 also keeps tags of the form *5.4-lkgr* which point to the *Last Known Good +Revision* from the 5.4 branch that can be useful in the update process above. ## Major Updates -We upgrade the version of V8 in Node.js master whenever a V8 release goes stable upstream, that is, whenever a new release of Chrome comes out. +We upgrade the version of V8 in Node.js master whenever a V8 release goes stable +upstream, that is, whenever a new release of Chrome comes out. -Upgrading major versions would be much harder to do with the patch mechanism above. A better strategy is to +Upgrading major versions would be much harder to do with the patch mechanism +above. A better strategy is to 1. Audit the current master branch and look at the patches that have been floated since the last major V8 update. 1. Replace the copy of V8 in Node.js with a fresh checkout of the latest stable V8 branch. Special care must be taken to recursively update the DEPS that V8 has a compile time dependency on (at the moment of this writing, these are only trace_event and gtest_prod.h) @@ -235,7 +272,7 @@ To audit for floating patches: git log --oneline deps/v8 ``` -To replace the copy of V8 in Node, use the '[update-v8](https://gist.github.com/targos/8da405e96e98fdff01a395bed365b816)' script2. For example, if you want to replace the copy of V8 in Node.js with the branch-head for V8 5.1 branch: +To replace the copy of V8 in Node.js, use the '[update-v8](https://gist.github.com/targos/8da405e96e98fdff01a395bed365b816)' script2. For example, if you want to replace the copy of V8 in Node.js with the branch-head for V8 5.1 branch: ```shell cd $NODE_DIR @@ -243,13 +280,17 @@ rm -rf deps/v8 path/to/update-v8 branch-heads/5.1 ``` -You may want to look at the commits created by the above scripts, and squash them once you have reviewed them. +You may want to look at the commits created by the above scripts, and squash +them once you have reviewed them. This should be followed up with manual refloating of all relevant patches. # Proposal: Using a fork repo to track upstream V8 -The fact that Node.js keeps a vendored, potentially edited copy of V8 in deps/ makes the above processes a bit complicated. An alternative proposal would be to create a fork of V8 at nodejs/v8 that would be used to maintain the V8 branches. This has several benefits: +The fact that Node.js keeps a vendored, potentially edited copy of V8 in deps/ +makes the above processes a bit complicated. An alternative proposal would be to +create a fork of V8 at nodejs/v8 that would be used to maintain the V8 branches. 
+This has several benefits: * The process to update the version of V8 in Node.js could be automated to track the tips of various V8 branches in nodejs/v8. * It would simplify cherry-picking and porting of fixes between branches as the version bumps in v8-version.h would happen as part of this update instead of on every change. @@ -259,17 +300,27 @@ The fact that Node.js keeps a vendored, potentially edited copy of V8 in deps/ m This would require some tooling to: -* A script that would update the V8 in a specific Node branch with V8 from upstream (dependent on branch abandoned vs. active). +* A script that would update the V8 in a specific Node.js branch with V8 from upstream (dependent on branch abandoned vs. active). * We need a script to bump V8 version numbers when a new version of V8 is promoted from nodejs/v8 to nodejs/node. * Enabled the V8-CI build in Jenkins to build from the nodejs/v8 fork. # Proposal: Dealing with the need to float patches to a stable/beta -Sometimes upstream V8 may not want to merge a fix to their stable branches, but we might. An example of this would be a fix for a performance regression that only affects Node.js and not the browser. At the moment we don't have a mechanism to deal with this situation. If we float a patch and bump the V8 version, we might run into a problem if upstream releases a fix with the same version number. - -One idea we have been kicking around is that we could move to a 5-place version number in V8, e.g.: 5.4.500.30.${embedder}. The ${embedder} represents the number of patches an embedder is floating on top of an official V8 version. This would also help with auditing the floating patches in the Node commit history. - -We are trying this out in https://github.com/nodejs/node/pull/9754. If this ends up working, we will investigate making this change upstream. +Sometimes upstream V8 may not want to merge a fix to their stable branches, but +we might. An example of this would be a fix for a performance regression that +only affects Node.js and not the browser. At the moment we don't have a +mechanism to deal with this situation. If we float a patch and bump the V8 +version, we might run into a problem if upstream releases a fix with the same +version number. + +One idea we have been kicking around is that we could move to a 5-place version +number in V8, e.g.: 5.4.500.30.${embedder}. The ${embedder} represents the +number of patches an embedder is floating on top of an official V8 version. This +would also help with auditing the floating patches in the Node.js commit +history. + +We are trying this out in https://github.com/nodejs/node/pull/9754. If this ends +up working, we will investigate making this change upstream. ## Notes From 11ed8007df373721b6d464e38f19996c96f49f59 Mon Sep 17 00:00:00 2001 From: Sam Roberts Date: Thu, 22 Dec 2016 10:31:36 -0800 Subject: [PATCH 080/144] src: describe what NODE_MODULE_VERSION is for Current comment described what to do with it when the ABI changes, but implied that Node.js would load modules with newer ABI numbers, which it will not. PR-URL: https://github.com/nodejs/node/pull/10414 Reviewed-By: Anna Henningsen Reviewed-By: Gibson Fahnestock Reviewed-By: Italo A. 
Casas Reviewed-By: James M Snell Reviewed-By: Santiago Gimeno Reviewed-By: Sakthipriyan Vairamani --- src/node_version.h | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/node_version.h b/src/node_version.h index d92cfb5b1f3a5f..da55d394abfb33 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -44,6 +44,9 @@ (minor) == NODE_MINOR_VERSION && (patch) <= NODE_PATCH_VERSION)) /** + * Node.js will refuse to load modules that weren't compiled against its own + * module ABI number, exposed as the process.versions.modules property. + * * When this version number is changed, node.js will refuse * to load older modules. This should be done whenever * an API is broken in the C++ side, including in v8 or From cf3f75f6f057448cbe43d23ed64c451b164814ea Mon Sep 17 00:00:00 2001 From: Fumiya KARASAWA Date: Fri, 23 Dec 2016 00:56:31 +0900 Subject: [PATCH 081/144] doc: fixup errors in stream.md When decodeStrings is false and given data is string, _write() function receives the string data not `Buffer`. PR-URL: https://github.com/nodejs/node/pull/10411 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- doc/api/stream.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/api/stream.md b/doc/api/stream.md index 881bb2c6294258..291cda735aee3d 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -1273,8 +1273,8 @@ If the `decodeStrings` property is set in the constructor options, then indicate the character encoding of the string. This is to support implementations that have an optimized handling for certain string data encodings. If the `decodeStrings` property is explicitly set to `false`, -the `encoding` argument can be safely ignored, and `chunk` will always be a -`Buffer`. +the `encoding` argument can be safely ignored, and `chunk` will remain the same +object that is passed to `.write()`. The `writable._write()` method is prefixed with an underscore because it is internal to the class that defines it, and should never be called directly by @@ -1503,9 +1503,9 @@ Implementers, and only from within the `readable._read()` method. It is recommended that errors occurring during the processing of the `readable._read()` method are emitted using the `'error'` event rather than being thrown. Throwing an Error from within `readable._read()` can result in -expected and inconsistent behavior depending on whether the stream is operating -in flowing or paused mode. Using the `'error'` event ensures consistent and -predictable handling of errors. +unexpected and inconsistent behavior depending on whether the stream is +operating in flowing or paused mode. Using the `'error'` event ensures +consistent and predictable handling of errors. 
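For example, here is a minimal sketch of reporting a failure from `readable._read()` through the `'error'` event rather than throwing. It is illustrative only: `fetchChunk` and `SketchSource` are made-up names standing in for an arbitrary asynchronous data source, not part of the stream API.

```js
const Readable = require('stream').Readable;

// Made-up asynchronous data source, used only for this sketch.
function fetchChunk(size, callback) {
  setImmediate(() => callback(new Error('backing store unavailable')));
}

class SketchSource extends Readable {
  _read(size) {
    fetchChunk(size, (err, chunk) => {
      if (err) {
        // Report the failure via 'error' instead of throwing, so the
        // outcome does not depend on flowing vs. paused mode.
        this.emit('error', err);
        return;
      }
      this.push(chunk);
    });
  }
}

const src = new SketchSource();
src.on('error', (err) => console.error('read failed:', err.message));
src.resume(); // start reading; the simulated failure surfaces as 'error'
```

Because the failure is delivered as an `'error'` event, attaching a listener handles it the same way whether the stream is being consumed in flowing or paused mode.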
```js const Readable = require('stream').Readable; From cff57be2b652214e52200ca2cac34e979766acdd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Legan=C3=A9s-Combarro=20=27piranna?= Date: Mon, 26 Dec 2016 14:29:25 +0100 Subject: [PATCH 082/144] build: add (not) cross-compiled configure flags Adds --cross-compiling and --no-cross-compiling flags Fixes: https://github.com/nodejs/node/issues/10271 PR-URL: https://github.com/nodejs/node/pull/10287 Reviewed-By: Anna Henningsen Reviewed-By: James M Snell Reviewed-By: Ben Noordhuis --- configure | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/configure b/configure index bf937ca702a550..92cf4c821709f7 100755 --- a/configure +++ b/configure @@ -79,6 +79,17 @@ parser.add_option('--dest-cpu', choices=valid_arch, help='CPU architecture to build for ({0})'.format(', '.join(valid_arch))) +parser.add_option('--cross-compiling', + action='store_true', + dest='cross_compiling', + default=None, + help='force build to be considered as cross compiled') +parser.add_option('--no-cross-compiling', + action='store_false', + dest='cross_compiling', + default=None, + help='force build to be considered as NOT cross compiled') + parser.add_option('--dest-os', action='store', dest='dest_os', @@ -765,7 +776,9 @@ def configure_node(o): o['variables']['target_arch'] = target_arch o['variables']['node_byteorder'] = sys.byteorder - cross_compiling = target_arch != host_arch + cross_compiling = (options.cross_compiling + if options.cross_compiling is not None + else target_arch != host_arch) want_snapshots = not options.without_snapshot o['variables']['want_separate_host_toolset'] = int( cross_compiling and want_snapshots) From 5b7b457643a0e50de605b282522da0c4d86182b9 Mon Sep 17 00:00:00 2001 From: Sam Roberts Date: Thu, 23 Apr 2015 19:33:38 +0900 Subject: [PATCH 083/144] doc: add tls.DEFAULT_ECDH_CURVE A user can change the default curve for ECDH key agreement by using tls.DEFAULT_ECDH_CURVE. PR-URL: https://github.com/nodejs/node/pull/10264 Reviewed-By: Roman Reiss Reviewed-By: Italo A. Casas Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell Reviewed-By: Shigeki Ohtsu --- doc/api/tls.md | 62 +++++++++++++++++++++++++++++--------------------- 1 file changed, 36 insertions(+), 26 deletions(-) diff --git a/doc/api/tls.md b/doc/api/tls.md index fb90f776c94270..c33174f250479a 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -926,10 +926,10 @@ added: v0.11.13 *Note*: [`tls.createServer()`][] sets the default value to `true`, other APIs that create secure contexts leave it unset. * `ecdhCurve` {string} A string describing a named curve to use for ECDH key - agreement or `false` to disable ECDH. Defaults to `prime256v1` (NIST P-256). - Use [`crypto.getCurves()`][] to obtain a list of available curve names. On - recent releases, `openssl ecparam -list_curves` will also display the name - and description of each available elliptic curve. + agreement or `false` to disable ECDH. Defaults to + [`tls.DEFAULT_ECDH_CURVE`]. Use [`crypto.getCurves()`][] to obtain a list + of available curve names. On recent releases, `openssl ecparam -list_curves` + will also display the name and description of each available elliptic curve. * `dhparam` {string|Buffer} Diffie Hellman parameters, required for [Perfect Forward Secrecy][]. Use `openssl dhparam` to create the parameters. The key length must be greater than or equal to 1024 bits, otherwise an @@ -1077,6 +1077,13 @@ For example: console.log(tls.getCiphers()); // ['AES128-SHA', 'AES256-SHA', ...] 
``` +## tls.DEFAULT_ECDH_CURVE + +The default curve name to use for ECDH key agreement in a tls server. The +default value is `'prime256v1'` (NIST P-256). Consult [RFC 4492] and +[FIPS.186-4] for more details. + + ## Deprecated APIs ### Class: CryptoStream @@ -1184,32 +1191,35 @@ secure_socket = tls.TLSSocket(socket, options); where `secure_socket` has the same API as `pair.cleartext`. -[OpenSSL cipher list format documentation]: https://www.openssl.org/docs/man1.0.2/apps/ciphers.html#CIPHER-LIST-FORMAT [Chrome's 'modern cryptography' setting]: https://www.chromium.org/Home/chromium-security/education/tls#TOC-Cipher-Suites -[OpenSSL Options]: crypto.html#crypto_openssl_options -[modifying the default cipher suite]: #tls_modifying_the_default_tls_cipher_suite -[specific attacks affecting larger AES key sizes]: https://www.schneier.com/blog/archives/2009/07/another_new_aes.html -[`crypto.getCurves()`]: crypto.html#crypto_crypto_getcurves -[`tls.createServer()`]: #tls_tls_createserver_options_secureconnectionlistener -[`tls.createSecurePair()`]: #tls_tls_createsecurepair_context_isserver_requestcert_rejectunauthorized_options -[`tls.TLSSocket`]: #tls_class_tls_tlssocket -[`net.Server`]: net.html#net_class_net_server -[`net.Socket`]: net.html#net_class_net_socket -[`net.Server.address()`]: net.html#net_server_address -[`'secureConnect'`]: #tls_event_secureconnect -[`'secureConnection'`]: #tls_event_secureconnection -[Perfect Forward Secrecy]: #tls_perfect_forward_secrecy -[Stream]: stream.html#stream_stream -[SSL_METHODS]: https://www.openssl.org/docs/man1.0.2/ssl/ssl.html#DEALING-WITH-PROTOCOL-METHODS -[tls.Server]: #tls_class_tls_server -[SSL_CTX_set_timeout]: https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_timeout.html -[Forward secrecy]: https://en.wikipedia.org/wiki/Perfect_forward_secrecy [DHE]: https://en.wikipedia.org/wiki/Diffie%E2%80%93Hellman_key_exchange [ECDHE]: https://en.wikipedia.org/wiki/Elliptic_curve_Diffie%E2%80%93Hellman -[asn1.js]: https://npmjs.org/package/asn1.js +[FIPS.186-4]: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf +[Forward secrecy]: https://en.wikipedia.org/wiki/Perfect_forward_secrecy [OCSP request]: https://en.wikipedia.org/wiki/OCSP_stapling -[TLS recommendations]: https://wiki.mozilla.org/Security/Server_Side_TLS +[OpenSSL Options]: crypto.html#crypto_openssl_options +[OpenSSL cipher list format documentation]: https://www.openssl.org/docs/man1.0.2/apps/ciphers.html#CIPHER-LIST-FORMAT +[Perfect Forward Secrecy]: #tls_perfect_forward_secrecy +[RFC 4492]: https://www.rfc-editor.org/rfc/rfc4492.txt +[SSL_CTX_set_timeout]: https://www.openssl.org/docs/man1.0.2/ssl/SSL_CTX_set_timeout.html +[SSL_METHODS]: https://www.openssl.org/docs/man1.0.2/ssl/ssl.html#DEALING-WITH-PROTOCOL-METHODS +[Stream]: stream.html#stream_stream [TLS Session Tickets]: https://www.ietf.org/rfc/rfc5077.txt +[TLS recommendations]: https://wiki.mozilla.org/Security/Server_Side_TLS +[`'secureConnect'`]: #tls_event_secureconnect +[`'secureConnection'`]: #tls_event_secureconnection +[`crypto.getCurves()`]: crypto.html#crypto_crypto_getcurves +[`net.Server.address()`]: net.html#net_server_address +[`net.Server`]: net.html#net_class_net_server +[`net.Socket`]: net.html#net_class_net_socket +[`tls.DEFAULT_ECDH_CURVE`]: #tls_tls_default_ecdh_curve [`tls.TLSSocket.getPeerCertificate()`]: #tls_tlssocket_getpeercertificate_detailed -[`tls.createSecureContext()`]: #tls_tls_createsecurecontext_options +[`tls.TLSSocket`]: #tls_class_tls_tlssocket [`tls.connect()`]: 
#tls_tls_connect_options_callback +[`tls.createSecureContext()`]: #tls_tls_createsecurecontext_options +[`tls.createSecurePair()`]: #tls_tls_createsecurepair_context_isserver_requestcert_rejectunauthorized_options +[`tls.createServer()`]: #tls_tls_createserver_options_secureconnectionlistener +[asn1.js]: https://npmjs.org/package/asn1.js +[modifying the default cipher suite]: #tls_modifying_the_default_tls_cipher_suite +[specific attacks affecting larger AES key sizes]: https://www.schneier.com/blog/archives/2009/07/another_new_aes.html +[tls.Server]: #tls_class_tls_server From 75efdeb63569f3c463623cc57c7bc0c4958e48b4 Mon Sep 17 00:00:00 2001 From: jBarz Date: Mon, 12 Dec 2016 23:56:24 -0500 Subject: [PATCH 084/144] os: fix os.release() for aix and add test PR-URL: https://github.com/nodejs/node/pull/10245 Reviewed-By: Michael Dawson Reviewed-By: Gibson Fahnestock Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- src/node_os.cc | 7 +++++++ test/parallel/test-os.js | 3 +++ 2 files changed, 10 insertions(+) diff --git a/src/node_os.cc b/src/node_os.cc index d8276f463d6625..f8b53e45d8e669 100644 --- a/src/node_os.cc +++ b/src/node_os.cc @@ -85,7 +85,14 @@ static void GetOSRelease(const FunctionCallbackInfo& args) { if (uname(&info) < 0) { return env->ThrowErrnoException(errno, "uname"); } +# ifdef _AIX + char release[256]; + snprintf(release, sizeof(release), + "%s.%s", info.version, info.release); + rval = release; +# else rval = info.release; +# endif #else // Windows char release[256]; OSVERSIONINFOW info; diff --git a/test/parallel/test-os.js b/test/parallel/test-os.js index fa78b9dc988e8f..5a0a9f6ad14cbc 100644 --- a/test/parallel/test-os.js +++ b/test/parallel/test-os.js @@ -77,6 +77,9 @@ const release = os.release(); console.log('release = ', release); is.string(release); assert.ok(release.length > 0); +//TODO: Check format on more than just AIX +if (common.isAix) + assert.ok(/^\d+\.\d+$/.test(release)); const platform = os.platform(); console.log('platform = ', platform); From 4f1d9452dec83095e2ae80ef9fdabac2c644ef82 Mon Sep 17 00:00:00 2001 From: Paul Graham Date: Wed, 7 Dec 2016 23:47:15 -0600 Subject: [PATCH 085/144] test: swap var for let/const throughout MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Swap var for let/const throughout the common.js module. Change a snake case variable to camel case starting on line 168. PR-URL: https://github.com/nodejs/node/pull/10177 Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Sakthipriyan Vairamani Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/common.js | 49 +++++++++++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/test/common.js b/test/common.js index 3c769345dd6b31..0fff06bfc0944d 100644 --- a/test/common.js +++ b/test/common.js @@ -38,8 +38,9 @@ exports.rootDir = exports.isWindows ? 
'c:\\' : '/'; exports.buildType = process.config.target_defaults.default_configuration; function rimrafSync(p) { + let st; try { - var st = fs.lstatSync(p); + st = fs.lstatSync(p); } catch (e) { if (e.code === 'ENOENT') return; @@ -93,9 +94,9 @@ if (process.env.TEST_THREAD_ID) { } exports.tmpDir = path.join(testRoot, exports.tmpDirName); -var opensslCli = null; -var inFreeBSDJail = null; -var localhostIPv4 = null; +let opensslCli = null; +let inFreeBSDJail = null; +let localhostIPv4 = null; exports.localIPv6Hosts = ['localhost']; if (exports.isLinux) { @@ -165,8 +166,8 @@ Object.defineProperty(exports, 'opensslCli', {get: function() { if (exports.isWindows) opensslCli += '.exe'; - var openssl_cmd = child_process.spawnSync(opensslCli, ['version']); - if (openssl_cmd.status !== 0 || openssl_cmd.error !== undefined) { + const opensslCmd = child_process.spawnSync(opensslCli, ['version']); + if (opensslCmd.status !== 0 || opensslCmd.error !== undefined) { // openssl command cannot be executed opensslCli = false; } @@ -194,7 +195,7 @@ if (exports.isWindows) { exports.PIPE = exports.tmpDir + '/test.sock'; } -var ifaces = os.networkInterfaces(); +const ifaces = os.networkInterfaces(); exports.hasIPv6 = Object.keys(ifaces).some(function(name) { return /lo/.test(name) && ifaces[name].some(function(info) { return info.family === 'IPv6'; @@ -204,7 +205,7 @@ exports.hasIPv6 = Object.keys(ifaces).some(function(name) { exports.ddCommand = function(filename, kilobytes) { if (exports.isWindows) { - var p = path.resolve(exports.fixturesDir, 'create-file.js'); + const p = path.resolve(exports.fixturesDir, 'create-file.js'); return '"' + process.argv[0] + '" "' + p + '" "' + filename + '" ' + (kilobytes * 1024); } else { @@ -214,7 +215,7 @@ exports.ddCommand = function(filename, kilobytes) { exports.spawnCat = function(options) { - var spawn = require('child_process').spawn; + const spawn = require('child_process').spawn; if (exports.isWindows) { return spawn('more', [], options); @@ -225,7 +226,7 @@ exports.spawnCat = function(options) { exports.spawnSyncCat = function(options) { - var spawnSync = require('child_process').spawnSync; + const spawnSync = require('child_process').spawnSync; if (exports.isWindows) { return spawnSync('more', [], options); @@ -236,7 +237,7 @@ exports.spawnSyncCat = function(options) { exports.spawnPwd = function(options) { - var spawn = require('child_process').spawn; + const spawn = require('child_process').spawn; if (exports.isWindows) { return spawn('cmd.exe', ['/d', '/c', 'cd'], options); @@ -277,7 +278,7 @@ exports.platformTimeout = function(ms) { return ms; // ARMv8+ }; -var knownGlobals = [ +let knownGlobals = [ Buffer, clearImmediate, clearInterval, @@ -351,9 +352,9 @@ function allowGlobals(...whitelist) { exports.allowGlobals = allowGlobals; function leakedGlobals() { - var leaked = []; + const leaked = []; - for (var val in global) + for (const val in global) if (!knownGlobals.includes(global[val])) leaked.push(val); @@ -366,7 +367,7 @@ exports.globalCheck = true; process.on('exit', function() { if (!exports.globalCheck) return; - var leaked = leakedGlobals(); + const leaked = leakedGlobals(); if (leaked.length > 0) { console.error('Unknown globals: %s', leaked); fail('Unknown global found'); @@ -374,13 +375,13 @@ process.on('exit', function() { }); -var mustCallChecks = []; +const mustCallChecks = []; function runCallChecks(exitCode) { if (exitCode !== 0) return; - var failed = mustCallChecks.filter(function(context) { + const failed = 
mustCallChecks.filter(function(context) { return context.actual !== context.expected; }); @@ -399,7 +400,7 @@ function runCallChecks(exitCode) { exports.mustCall = function(fn, expected) { if (typeof expected !== 'number') expected = 1; - var context = { + const context = { expected: expected, actual: 0, stack: (new Error()).stack, @@ -418,9 +419,9 @@ exports.mustCall = function(fn, expected) { }; exports.hasMultiLocalhost = function hasMultiLocalhost() { - var TCP = process.binding('tcp_wrap').TCP; - var t = new TCP(); - var ret = t.bind('127.0.0.2', exports.PORT); + const TCP = process.binding('tcp_wrap').TCP; + const t = new TCP(); + const ret = t.bind('127.0.0.2', exports.PORT); t.close(); return ret === 0; }; @@ -466,7 +467,7 @@ ArrayStream.prototype.write = function() {}; exports.nodeProcessAborted = function nodeProcessAborted(exitCode, signal) { // Depending on the compiler used, node will exit with either // exit code 132 (SIGILL), 133 (SIGTRAP) or 134 (SIGABRT). - var expectedExitCodes = [132, 133, 134]; + let expectedExitCodes = [132, 133, 134]; // On platforms using KSH as the default shell (like SmartOS), // when a process aborts, KSH exits with an exit code that is @@ -495,8 +496,8 @@ exports.nodeProcessAborted = function nodeProcessAborted(exitCode, signal) { }; exports.busyLoop = function busyLoop(time) { - var startTime = Timer.now(); - var stopTime = startTime + time; + const startTime = Timer.now(); + const stopTime = startTime + time; while (Timer.now() < stopTime) {} }; From f9a5c13ff30818231c89b65a6196ba015e1b7b1f Mon Sep 17 00:00:00 2001 From: "Christopher J. Brody" Date: Wed, 7 Dec 2016 13:39:45 +0100 Subject: [PATCH 086/144] dtrace: resolve conversion warnings from SLURP_INT Resolve build warnings on Windows with the following pattern: warning C4244: '=': conversion from 'int64_t' to 'int32_t', possible loss of data PR-URL: https://github.com/nodejs/node/pull/10143 Reviewed-By: Ben Noordhuis Reviewed-By: James M Snell --- src/node_dtrace.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node_dtrace.cc b/src/node_dtrace.cc index e42c20877df126..8653b673755fde 100644 --- a/src/node_dtrace.cc +++ b/src/node_dtrace.cc @@ -57,7 +57,7 @@ using v8::Value; "expected object for " #obj " to contain integer member " #member); \ } \ *valp = obj->Get(OneByteString(env->isolate(), #member)) \ - ->ToInteger(env->isolate())->Value(); + ->Int32Value(); #define SLURP_OBJECT(obj, member, valp) \ if (!(obj)->IsObject()) { \ From 495213e545de5f041cfa0f659cbcd068654db776 Mon Sep 17 00:00:00 2001 From: "Christopher J. Brody" Date: Wed, 23 Nov 2016 11:53:13 +0100 Subject: [PATCH 087/144] url: mark ignored return value in node::url::Parse(...) This is to resolve an unused result warning in node_url.cc. 
Resolve macro redefinition warning on Windows PR-URL: https://github.com/nodejs/node/pull/10141 Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson Reviewed-By: James M Snell Reviewed-By: Sam Roberts --- src/node_url.cc | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/node_url.cc b/src/node_url.cc index 7502461114a7b4..9bf58944d6b927 100644 --- a/src/node_url.cc +++ b/src/node_url.cc @@ -62,7 +62,7 @@ using v8::Value; url.flags |= URL_FLAGS_TERMINATED; \ goto done; \ } -#define FAILED() \ +#define URL_FAILED() \ { \ url.flags |= URL_FLAGS_FAILED; \ goto done; \ @@ -744,7 +744,7 @@ namespace url { break; case kNoScheme: if (!has_base || (IS_CANNOT_BE_BASE(base.flags) && ch != '#')) { - FAILED() + URL_FAILED() } else if (IS_CANNOT_BE_BASE(base.flags) && ch == '#') { SET_HAVE_SCHEME() url.scheme = base.scheme; @@ -982,10 +982,10 @@ namespace url { case kHostname: if (ch == ':' && !sbflag) { if (special && buffer.size() == 0) - FAILED() + URL_FAILED() SET_HAVE_HOST() if (ParseHost(&buffer, &url.host) < 0) - FAILED() + URL_FAILED() buffer.clear(); state = kPort; if (override == kHostname) @@ -997,10 +997,10 @@ namespace url { special_back_slash) { p--; if (special && buffer.size() == 0) - FAILED() + URL_FAILED() SET_HAVE_HOST() if (ParseHost(&buffer, &url.host) < 0) - FAILED() + URL_FAILED() buffer.clear(); state = kPathStart; if (state_override) @@ -1029,14 +1029,14 @@ namespace url { if (port >= 0 && port <= 0xffff) { url.port = NormalizePort(url.scheme, port); } else if (!state_override) { - FAILED() + URL_FAILED() } buffer.clear(); } state = kPathStart; continue; } else { - FAILED(); + URL_FAILED(); } break; case kFile: @@ -1151,7 +1151,7 @@ namespace url { if (buffer != "localhost") { SET_HAVE_HOST() if (ParseHost(&buffer, &url.host) < 0) - FAILED() + URL_FAILED() } buffer.clear(); state = kPathStart; @@ -1294,7 +1294,7 @@ namespace url { argv[ARG_PATH] = Copy(isolate, url.path); } - cb->Call(context, recv, 9, argv); + (void)cb->Call(context, recv, 9, argv); } static void Parse(const FunctionCallbackInfo& args) { From 797495a84a7d3d4bbb849476022cc3058d8c2d99 Mon Sep 17 00:00:00 2001 From: Brian White Date: Sat, 24 Dec 2016 19:50:05 -0500 Subject: [PATCH 088/144] buffer: improve allocation performance MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit assertSize() is adjusted to be inlineable according to V8's default function size limits when determining inlineability. This results in up to 11% performance gains when allocating any kind of Buffer. Avoid avoids use of in, resulting in ~50% improvement when creating a Buffer from an array-like object. 
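As a rough illustration of the second point (a sketch only — `looksArrayLikeOld` and `looksArrayLikeNew` are made-up helpers, not the actual `lib/buffer.js` code; the real check lives inside `fromObject()` as shown in the diff below):

```js
// Made-up helpers for illustration; the real check is inside fromObject()
// in lib/buffer.js (see the diff below).
function looksArrayLikeOld(obj) {
  // The `in` operator also consults the prototype chain.
  return 'length' in obj;
}

function looksArrayLikeNew(obj) {
  // A plain property read; a missing `length` simply reads as undefined.
  return obj.length !== undefined;
}

const arrayLike = { length: 2, 0: 0x68, 1: 0x69 };
console.log(looksArrayLikeOld(arrayLike)); // true
console.log(looksArrayLikeNew(arrayLike)); // true
console.log(Buffer.from(arrayLike));       // <Buffer 68 69>
```

The plain property read is the cheaper operation on this hot path, which is where the reported ~50% improvement for array-like input comes from.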
PR-URL: https://github.com/nodejs/node/pull/10443 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Сковорода Никита Андреевич Reviewed-By: Anna Henningsen --- benchmark/buffers/buffer-from.js | 11 ++++++++++- lib/buffer.js | 8 ++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/benchmark/buffers/buffer-from.js b/benchmark/buffers/buffer-from.js index c35a0f23e06958..c7889b2ec63f22 100644 --- a/benchmark/buffers/buffer-from.js +++ b/benchmark/buffers/buffer-from.js @@ -10,7 +10,8 @@ const bench = common.createBenchmark(main, { 'buffer', 'uint8array', 'string', - 'string-base64' + 'string-base64', + 'object' ], len: [10, 2048], n: [1024] @@ -25,6 +26,7 @@ function main(conf) { const str = 'a'.repeat(len); const buffer = Buffer.allocUnsafe(len); const uint8array = new Uint8Array(len); + const obj = { length: null }; // Results in a new, empty Buffer var i; @@ -80,6 +82,13 @@ function main(conf) { } bench.end(n); break; + case 'object': + bench.start(); + for (i = 0; i < n * 1024; i++) { + Buffer.from(obj); + } + bench.end(n); + break; default: assert.fail(null, null, 'Should not get here'); } diff --git a/lib/buffer.js b/lib/buffer.js index 557ac867e2e0fc..a6a0d59f74f52f 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -108,6 +108,9 @@ Buffer.from = function(value, encodingOrOffset, length) { Object.setPrototypeOf(Buffer, Uint8Array); +// The 'assertSize' method will remove itself from the callstack when an error +// occurs. This is done simply to keep the internal details of the +// implementation from bleeding out to users. function assertSize(size) { let err = null; @@ -117,9 +120,6 @@ function assertSize(size) { err = new RangeError('"size" argument must not be negative'); if (err) { - // The following hides the 'assertSize' method from the - // callstack. This is done simply to hide the internal - // details of the implementation from bleeding out to users. 
Error.captureStackTrace(err, assertSize); throw err; } @@ -258,7 +258,7 @@ function fromObject(obj) { } if (obj) { - if ('length' in obj || isArrayBuffer(obj.buffer) || + if (obj.length !== undefined || isArrayBuffer(obj.buffer) || isSharedArrayBuffer(obj.buffer)) { if (typeof obj.length !== 'number' || obj.length !== obj.length) { return new FastBuffer(); From 1aa3ab1ec6f5b4091f6cbf33ed44790336f66439 Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Sat, 24 Dec 2016 16:07:52 -0500 Subject: [PATCH 089/144] test: refactor the code in test-fs-chmod * use const and let instead of var * use common.mustCall to control functions executions * use assert.strictEqual instead of assert.equal * use assert.ifError to handle errors * use arrow functions * remove unnecessary variables PR-URL: https://github.com/nodejs/node/pull/10440 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- test/parallel/test-fs-chmod.js | 117 ++++++++++++++------------------- 1 file changed, 51 insertions(+), 66 deletions(-) diff --git a/test/parallel/test-fs-chmod.js b/test/parallel/test-fs-chmod.js index 0e1d7898b6562d..2e5d839ae97720 100644 --- a/test/parallel/test-fs-chmod.js +++ b/test/parallel/test-fs-chmod.js @@ -1,12 +1,11 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var path = require('path'); -var fs = require('fs'); -var got_error = false; -var success_count = 0; -var mode_async; -var mode_sync; +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); +const fs = require('fs'); + +let mode_async; +let mode_sync; // Need to hijack fs.open/close to make sure that things // get closed once they're opened. @@ -19,7 +18,7 @@ fs._closeSync = fs.closeSync; fs.close = close; fs.closeSync = closeSync; -var openCount = 0; +let openCount = 0; function open() { openCount++; @@ -54,57 +53,49 @@ if (common.isWindows) { const file1 = path.join(common.fixturesDir, 'a.js'); const file2 = path.join(common.fixturesDir, 'a1.js'); -fs.chmod(file1, mode_async.toString(8), function(err) { - if (err) { - got_error = true; +fs.chmod(file1, mode_async.toString(8), common.mustCall((err) => { + assert.ifError(err); + + console.log(fs.statSync(file1).mode); + + if (common.isWindows) { + assert.ok((fs.statSync(file1).mode & 0o777) & mode_async); } else { - console.log(fs.statSync(file1).mode); + assert.strictEqual(mode_async, fs.statSync(file1).mode & 0o777); + } + + fs.chmodSync(file1, mode_sync); + if (common.isWindows) { + assert.ok((fs.statSync(file1).mode & 0o777) & mode_sync); + } else { + assert.strictEqual(mode_sync, fs.statSync(file1).mode & 0o777); + } +})); + +fs.open(file2, 'a', common.mustCall((err, fd) => { + assert.ifError(err); + + fs.fchmod(fd, mode_async.toString(8), common.mustCall((err) => { + assert.ifError(err); + + console.log(fs.fstatSync(fd).mode); if (common.isWindows) { - assert.ok((fs.statSync(file1).mode & 0o777) & mode_async); + assert.ok((fs.fstatSync(fd).mode & 0o777) & mode_async); } else { - assert.equal(mode_async, fs.statSync(file1).mode & 0o777); + assert.strictEqual(mode_async, fs.fstatSync(fd).mode & 0o777); } - fs.chmodSync(file1, mode_sync); + fs.fchmodSync(fd, mode_sync); if (common.isWindows) { - assert.ok((fs.statSync(file1).mode & 0o777) & mode_sync); + assert.ok((fs.fstatSync(fd).mode & 0o777) & mode_sync); } else { - assert.equal(mode_sync, fs.statSync(file1).mode & 0o777); + assert.strictEqual(mode_sync, fs.fstatSync(fd).mode & 0o777); } - success_count++; - } -}); -fs.open(file2, 'a', function(err, fd) 
{ - if (err) { - got_error = true; - console.error(err.stack); - return; - } - fs.fchmod(fd, mode_async.toString(8), function(err) { - if (err) { - got_error = true; - } else { - console.log(fs.fstatSync(fd).mode); - - if (common.isWindows) { - assert.ok((fs.fstatSync(fd).mode & 0o777) & mode_async); - } else { - assert.equal(mode_async, fs.fstatSync(fd).mode & 0o777); - } - - fs.fchmodSync(fd, mode_sync); - if (common.isWindows) { - assert.ok((fs.fstatSync(fd).mode & 0o777) & mode_sync); - } else { - assert.equal(mode_sync, fs.fstatSync(fd).mode & 0o777); - } - success_count++; - fs.close(fd); - } - }); -}); + fs.close(fd); + })); +})); // lchmod if (fs.lchmod) { @@ -113,25 +104,19 @@ if (fs.lchmod) { common.refreshTmpDir(); fs.symlinkSync(file2, link); - fs.lchmod(link, mode_async, function(err) { - if (err) { - got_error = true; - } else { - console.log(fs.lstatSync(link).mode); - assert.equal(mode_async, fs.lstatSync(link).mode & 0o777); + fs.lchmod(link, mode_async, common.mustCall((err) => { + assert.ifError(err); - fs.lchmodSync(link, mode_sync); - assert.equal(mode_sync, fs.lstatSync(link).mode & 0o777); - success_count++; - } - }); -} else { - success_count++; + console.log(fs.lstatSync(link).mode); + assert.strictEqual(mode_async, fs.lstatSync(link).mode & 0o777); + + fs.lchmodSync(link, mode_sync); + assert.strictEqual(mode_sync, fs.lstatSync(link).mode & 0o777); + + })); } process.on('exit', function() { - assert.equal(3, success_count); - assert.equal(0, openCount); - assert.equal(false, got_error); + assert.strictEqual(0, openCount); }); From 5164b56224e0e3ce0d1fc0347aeb7b8866b4520a Mon Sep 17 00:00:00 2001 From: Sarah Meyer Date: Thu, 1 Dec 2016 12:03:48 -0600 Subject: [PATCH 090/144] test: add test for SIGWINCH handling by stdio.js PR-URL: https://github.com/nodejs/node/pull/10063 Reviewed-By: James M Snell --- .../test-stderr-stdout-handle-sigwinch.js | 29 +++++++++++++++++++ .../test-stderr-stdout-handle-sigwinch.out | 2 ++ 2 files changed, 31 insertions(+) create mode 100644 test/pseudo-tty/test-stderr-stdout-handle-sigwinch.js create mode 100644 test/pseudo-tty/test-stderr-stdout-handle-sigwinch.out diff --git a/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.js b/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.js new file mode 100644 index 00000000000000..f1a95559b9dc92 --- /dev/null +++ b/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.js @@ -0,0 +1,29 @@ +'use strict'; +const common = require('../common'); + +const originalRefreshSizeStderr = process.stderr._refreshSize; +const originalRefreshSizeStdout = process.stdout._refreshSize; + +const wrap = (fn, ioStream, string) => { + return () => { + // The console.log() call prints a string that is in the .out file. In other + // words, the console.log() is part of the test, not extraneous debugging. 
+ console.log(string); + try { + fn.call(ioStream); + } catch (e) { + // EINVAL happens on SmartOS if emulation is incomplete + if (!common.isSunOS || e.code !== 'EINVAL') + throw e; + } + }; +}; + +process.stderr._refreshSize = wrap(originalRefreshSizeStderr, + process.stderr, + 'calling stderr._refreshSize'); +process.stdout._refreshSize = wrap(originalRefreshSizeStdout, + process.stdout, + 'calling stdout._refreshSize'); + +process.emit('SIGWINCH'); diff --git a/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.out b/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.out new file mode 100644 index 00000000000000..dffbe030404487 --- /dev/null +++ b/test/pseudo-tty/test-stderr-stdout-handle-sigwinch.out @@ -0,0 +1,2 @@ +calling stdout._refreshSize +calling stderr._refreshSize From e86bf27fe8007aa4195affa624e53e22f2283341 Mon Sep 17 00:00:00 2001 From: Matt Crummey Date: Thu, 1 Dec 2016 17:59:47 +0000 Subject: [PATCH 091/144] doc: improve rinfo object documentation Provide details for fields of rinfo object of UDP message event. PR-URL: https://github.com/nodejs/node/pull/10050 Reviewed-By: James M Snell --- doc/api/dgram.md | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/doc/api/dgram.md b/doc/api/dgram.md index 695ec3712eeddc..c059b6dc2ba4ff 100644 --- a/doc/api/dgram.md +++ b/doc/api/dgram.md @@ -70,20 +70,14 @@ datagram messages. This occurs as soon as UDP sockets are created. added: v0.1.99 --> +The `'message'` event is emitted when a new datagram is available on a socket. +The event handler function is passed two arguments: `msg` and `rinfo`. * `msg` {Buffer} - The message * `rinfo` {Object} - Remote address information - -The `'message'` event is emitted when a new datagram is available on a socket. -The event handler function is passed two arguments: `msg` and `rinfo`. 
The -`msg` argument is a [`Buffer`][] and `rinfo` is an object with the sender's -address information provided by the `address`, `family` and `port` properties: - -```js -socket.on('message', (msg, rinfo) => { - console.log('Received %d bytes from %s:%d\n', - msg.length, rinfo.address, rinfo.port); -}); -``` + * `address` {String} The sender address + * `family` {String} The address family (`'IPv4'` or `'IPv6'`) + * `port` {Number} The sender port + * `size` {Number} The message size ### socket.addMembership(multicastAddress[, multicastInterface]) - - -##### Description of change - From b094b49659f82dd2a23f4460d5dea6a675f327b9 Mon Sep 17 00:00:00 2001 From: Brian White Date: Sun, 18 Dec 2016 08:48:40 -0500 Subject: [PATCH 101/144] http: reuse existing headers array for raw values PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_incoming.js | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/lib/_http_incoming.js b/lib/_http_incoming.js index 5c04ab9dfe3331..d02f19424c0442 100644 --- a/lib/_http_incoming.js +++ b/lib/_http_incoming.js @@ -103,21 +103,17 @@ IncomingMessage.prototype.destroy = function destroy(error) { IncomingMessage.prototype._addHeaderLines = _addHeaderLines; function _addHeaderLines(headers, n) { if (headers && headers.length) { - var raw, dest; + var dest; if (this.complete) { - raw = this.rawTrailers; + this.rawTrailers = headers; dest = this.trailers; } else { - raw = this.rawHeaders; + this.rawHeaders = headers; dest = this.headers; } for (var i = 0; i < n; i += 2) { - var k = headers[i]; - var v = headers[i + 1]; - raw.push(k); - raw.push(v); - this._addHeaderLine(k, v, dest); + this._addHeaderLine(headers[i], headers[i + 1], dest); } } } From 1f0fd7b35d66bade65740b123e2502507b21d30b Mon Sep 17 00:00:00 2001 From: Brian White Date: Sun, 18 Dec 2016 08:56:52 -0500 Subject: [PATCH 102/144] http: misc cleanup and minor optimizations PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_common.js | 1 - lib/_http_outgoing.js | 8 +++----- lib/_http_server.js | 9 +++++---- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/lib/_http_common.js b/lib/_http_common.js index 46408077316207..b0767fd43ca1b8 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -198,7 +198,6 @@ function freeParser(parser, req, socket) { parser[kOnExecute] = null; if (parsers.free(parser) === false) parser.close(); - parser = null; } if (req) { req.parser = null; diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index b85e3c54074d82..8d134ecc501083 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -380,8 +380,7 @@ OutgoingMessage.prototype.getHeader = function getHeader(name) { if (!this._headers) return; - var key = name.toLowerCase(); - return this._headers[key]; + return this._headers[name.toLowerCase()]; }; @@ -585,7 +584,6 @@ OutgoingMessage.prototype.end = function end(data, encoding, callback) { if (this.connection && data) this.connection.cork(); - var ret; if (data) { // Normal body write. 
this.write(data, encoding); @@ -598,6 +596,7 @@ OutgoingMessage.prototype.end = function end(data, encoding, callback) { this.emit('finish'); }; + var ret; if (this._hasBody && this.chunkedEncoding) { ret = this._send('0\r\n' + this._trailer + '\r\n', 'latin1', finish); } else { @@ -677,8 +676,7 @@ OutgoingMessage.prototype._flushOutput = function _flushOutput(socket) { var outputCallbacks = this.outputCallbacks; socket.cork(); for (var i = 0; i < outputLength; i++) { - ret = socket.write(output[i], outputEncodings[i], - outputCallbacks[i]); + ret = socket.write(output[i], outputEncodings[i], outputCallbacks[i]); } socket.uncork(); diff --git a/lib/_http_server.js b/lib/_http_server.js index c5f92d52d54d9b..b32e484b4bcfa3 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -88,6 +88,8 @@ function ServerResponse(req) { if (req.method === 'HEAD') this._hasBody = false; this.sendDate = true; + this._sent100 = false; + this._expect_continue = false; if (req.httpVersionMajor < 1 || req.httpVersionMinor < 1) { this.useChunkedEncodingByDefault = chunkExpression.test(req.headers.te); @@ -195,8 +197,7 @@ function writeHead(statusCode, reason, obj) { if (common._checkInvalidHeaderChar(this.statusMessage)) throw new Error('Invalid character in statusMessage.'); - var statusLine = 'HTTP/1.1 ' + statusCode.toString() + ' ' + - this.statusMessage + CRLF; + var statusLine = 'HTTP/1.1 ' + statusCode + ' ' + this.statusMessage + CRLF; if (statusCode === 204 || statusCode === 304 || (100 <= statusCode && statusCode <= 199)) { @@ -232,7 +233,7 @@ function Server(requestListener) { net.Server.call(this, { allowHalfOpen: true }); if (requestListener) { - this.addListener('request', requestListener); + this.on('request', requestListener); } /* eslint-disable max-len */ @@ -242,7 +243,7 @@ function Server(requestListener) { /* eslint-enable max-len */ this.httpAllowHalfOpen = false; - this.addListener('connection', connectionListener); + this.on('connection', connectionListener); this.timeout = 2 * 60 * 1000; From df8b8b257d2d94905a5bcf47cc4e0514fa717e96 Mon Sep 17 00:00:00 2001 From: Brian White Date: Sun, 18 Dec 2016 21:20:29 -0500 Subject: [PATCH 103/144] http: refactor server connection handling PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_server.js | 440 +++++++++++++++++++++++--------------------- 1 file changed, 226 insertions(+), 214 deletions(-) diff --git a/lib/_http_server.js b/lib/_http_server.js index b32e484b4bcfa3..21113fc37a9f5b 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -264,40 +264,6 @@ exports.Server = Server; function connectionListener(socket) { - var self = this; - var outgoing = []; - var incoming = []; - var outgoingData = 0; - - function updateOutgoingData(delta) { - // `outgoingData` is an approximate amount of bytes queued through all - // inactive responses. If more data than the high watermark is queued - we - // need to pause TCP socket/HTTP parser, and wait until the data will be - // sent to the client. - outgoingData += delta; - if (socket._paused && outgoingData < socket._writableState.highWaterMark) - return socketOnDrain(); - } - - function abortIncoming() { - while (incoming.length) { - var req = incoming.shift(); - req.emit('aborted'); - req.emit('close'); - } - // abort socket._httpMessage ? 
- } - - function serverSocketCloseListener() { - debug('server socket close'); - // mark this parser as reusable - if (this.parser) { - freeParser(this.parser, null, this); - } - - abortIncoming(); - } - debug('SERVER new http connection'); httpSocketSetup(socket); @@ -305,18 +271,9 @@ function connectionListener(socket) { // If the user has added a listener to the server, // request, or response, then it's their responsibility. // otherwise, destroy on timeout by default - if (self.timeout) - socket.setTimeout(self.timeout); - socket.on('timeout', function() { - var req = socket.parser && socket.parser.incoming; - var reqTimeout = req && !req.complete && req.emit('timeout', socket); - var res = socket._httpMessage; - var resTimeout = res && res.emit('timeout', socket); - var serverTimeout = self.emit('timeout', socket); - - if (!reqTimeout && !resTimeout && !serverTimeout) - socket.destroy(); - }); + if (this.timeout) + socket.setTimeout(this.timeout); + socket.on('timeout', socketOnTimeout.bind(undefined, this, socket)); var parser = parsers.alloc(); parser.reinitialize(HTTPParser.REQUEST); @@ -332,17 +289,34 @@ function connectionListener(socket) { parser.maxHeaderPairs = 2000; } - socket.addListener('error', socketOnError); - socket.addListener('close', serverSocketCloseListener); - parser.onIncoming = parserOnIncoming; - socket.on('end', socketOnEnd); - socket.on('data', socketOnData); + var state = { + onData: null, + onError: null, + onEnd: null, + onClose: null, + outgoing: [], + incoming: [], + // `outgoingData` is an approximate amount of bytes queued through all + // inactive responses. If more data than the high watermark is queued - we + // need to pause TCP socket/HTTP parser, and wait until the data will be + // sent to the client. + outgoingData: 0 + }; + state.onData = socketOnData.bind(undefined, this, socket, parser, state); + state.onError = socketOnError.bind(undefined, this, socket, state); + state.onEnd = socketOnEnd.bind(undefined, this, socket, parser, state); + state.onClose = socketOnClose.bind(undefined, socket, state); + socket.on('data', state.onData); + socket.on('error', state.onError); + socket.on('end', state.onEnd); + socket.on('close', state.onClose); + parser.onIncoming = parserOnIncoming.bind(undefined, this, socket, state); // We are consuming socket, so it won't get any actual data socket.on('resume', onSocketResume); socket.on('pause', onSocketPause); - socket.on('drain', socketOnDrain); + socket.on('drain', socketOnDrain.bind(undefined, socket, state)); // Override on to unconsume on `data`, `readable` listeners socket.on = socketOnWrap; @@ -352,205 +326,243 @@ function connectionListener(socket) { parser._consumed = true; parser.consume(external); } - external = null; - parser[kOnExecute] = onParserExecute; + parser[kOnExecute] = + onParserExecute.bind(undefined, this, socket, parser, state); - // TODO(isaacs): Move all these functions out of here - function socketOnError(e) { - // Ignore further errors - this.removeListener('error', socketOnError); - this.on('error', () => {}); + socket._paused = false; +} +exports._connectionListener = connectionListener; - if (!self.emit('clientError', e, this)) - this.destroy(e); +function updateOutgoingData(socket, state, delta) { + state.outgoingData += delta; + if (socket._paused && + state.outgoingData < socket._writableState.highWaterMark) { + return socketOnDrain(socket, state); } +} - function socketOnData(d) { - assert(!socket._paused); - debug('SERVER socketOnData %d', d.length); - var ret = 
parser.execute(d); +function socketOnDrain(socket, state) { + var needPause = state.outgoingData > socket._writableState.highWaterMark; - onParserExecuteCommon(ret, d); + // If we previously paused, then start reading again. + if (socket._paused && !needPause) { + socket._paused = false; + if (socket.parser) + socket.parser.resume(); + socket.resume(); } +} + +function socketOnTimeout(server, socket) { + var req = socket.parser && socket.parser.incoming; + var reqTimeout = req && !req.complete && req.emit('timeout', socket); + var res = socket._httpMessage; + var resTimeout = res && res.emit('timeout', socket); + var serverTimeout = server.emit('timeout', socket); + + if (!reqTimeout && !resTimeout && !serverTimeout) + socket.destroy(); +} - function onParserExecute(ret, d) { - socket._unrefTimer(); - debug('SERVER socketOnParserExecute %d', ret); - onParserExecuteCommon(ret, undefined); +function socketOnClose(socket, state) { + debug('server socket close'); + // mark this parser as reusable + if (socket.parser) { + freeParser(socket.parser, null, socket); } - function onParserExecuteCommon(ret, d) { - if (ret instanceof Error) { - debug('parse error'); - socketOnError.call(socket, ret); - } else if (parser.incoming && parser.incoming.upgrade) { - // Upgrade or CONNECT - var bytesParsed = ret; - var req = parser.incoming; - debug('SERVER upgrade or connect', req.method); - - if (!d) - d = parser.getCurrentBuffer(); - - socket.removeListener('data', socketOnData); - socket.removeListener('end', socketOnEnd); - socket.removeListener('close', serverSocketCloseListener); - unconsume(parser, socket); - parser.finish(); - freeParser(parser, req, null); - parser = null; - - var eventName = req.method === 'CONNECT' ? 'connect' : 'upgrade'; - if (self.listenerCount(eventName) > 0) { - debug('SERVER have listener for %s', eventName); - var bodyHead = d.slice(bytesParsed, d.length); - - // TODO(isaacs): Need a way to reset a stream to fresh state - // IE, not flowing, and not explicitly paused. - socket._readableState.flowing = null; - self.emit(eventName, req, socket, bodyHead); - } else { - // Got upgrade header or CONNECT method, but have no handler. - socket.destroy(); - } - } + abortIncoming(state.incoming); +} - if (socket._paused && socket.parser) { - // onIncoming paused the socket, we should pause the parser as well - debug('pause parser'); - socket.parser.pause(); - } +function abortIncoming(incoming) { + while (incoming.length) { + var req = incoming.shift(); + req.emit('aborted'); + req.emit('close'); } + // abort socket._httpMessage ? 
+} - function socketOnEnd() { - var socket = this; - var ret = parser.finish(); +function socketOnEnd(server, socket, parser, state) { + var ret = parser.finish(); - if (ret instanceof Error) { - debug('parse error'); - socketOnError.call(socket, ret); - return; - } + if (ret instanceof Error) { + debug('parse error'); + state.onError(ret); + return; + } - if (!self.httpAllowHalfOpen) { - abortIncoming(); - if (socket.writable) socket.end(); - } else if (outgoing.length) { - outgoing[outgoing.length - 1]._last = true; - } else if (socket._httpMessage) { - socket._httpMessage._last = true; - } else { - if (socket.writable) socket.end(); - } + if (!server.httpAllowHalfOpen) { + abortIncoming(state.incoming); + if (socket.writable) socket.end(); + } else if (state.outgoing.length) { + state.outgoing[state.outgoing.length - 1]._last = true; + } else if (socket._httpMessage) { + socket._httpMessage._last = true; + } else { + if (socket.writable) socket.end(); } +} +function socketOnData(server, socket, parser, state, d) { + assert(!socket._paused); + debug('SERVER socketOnData %d', d.length); + var ret = parser.execute(d); - // The following callback is issued after the headers have been read on a - // new message. In this callback we setup the response object and pass it - // to the user. + onParserExecuteCommon(server, socket, parser, state, ret, d); +} - socket._paused = false; - function socketOnDrain() { - var needPause = outgoingData > socket._writableState.highWaterMark; - - // If we previously paused, then start reading again. - if (socket._paused && !needPause) { - socket._paused = false; - if (socket.parser) - socket.parser.resume(); - socket.resume(); +function onParserExecute(server, socket, parser, state, ret, d) { + socket._unrefTimer(); + debug('SERVER socketOnParserExecute %d', ret); + onParserExecuteCommon(server, socket, parser, state, ret, undefined); +} + +function socketOnError(server, socket, state, e) { + // Ignore further errors + socket.removeListener('error', state.onError); + socket.on('error', () => {}); + + if (!server.emit('clientError', e, socket)) + socket.destroy(e); +} + +function onParserExecuteCommon(server, socket, parser, state, ret, d) { + if (ret instanceof Error) { + debug('parse error'); + state.onError(ret); + } else if (parser.incoming && parser.incoming.upgrade) { + // Upgrade or CONNECT + var bytesParsed = ret; + var req = parser.incoming; + debug('SERVER upgrade or connect', req.method); + + if (!d) + d = parser.getCurrentBuffer(); + + socket.removeListener('data', state.onData); + socket.removeListener('end', state.onEnd); + socket.removeListener('close', state.onClose); + unconsume(parser, socket); + parser.finish(); + freeParser(parser, req, null); + parser = null; + + var eventName = req.method === 'CONNECT' ? 'connect' : 'upgrade'; + if (server.listenerCount(eventName) > 0) { + debug('SERVER have listener for %s', eventName); + var bodyHead = d.slice(bytesParsed, d.length); + + // TODO(isaacs): Need a way to reset a stream to fresh state + // IE, not flowing, and not explicitly paused. + socket._readableState.flowing = null; + server.emit(eventName, req, socket, bodyHead); + } else { + // Got upgrade header or CONNECT method, but have no handler. + socket.destroy(); } } - function parserOnIncoming(req, shouldKeepAlive) { - incoming.push(req); - - // If the writable end isn't consuming, then stop reading - // so that we don't become overwhelmed by a flood of - // pipelined requests that may never be resolved. 
- if (!socket._paused) { - var needPause = socket._writableState.needDrain || - outgoingData >= socket._writableState.highWaterMark; - if (needPause) { - socket._paused = true; - // We also need to pause the parser, but don't do that until after - // the call to execute, because we may still be processing the last - // chunk. - socket.pause(); - } - } + if (socket._paused && socket.parser) { + // onIncoming paused the socket, we should pause the parser as well + debug('pause parser'); + socket.parser.pause(); + } +} - var res = new ServerResponse(req); - res._onPendingData = updateOutgoingData; +function resOnFinish(req, res, socket, state) { + // Usually the first incoming element should be our request. it may + // be that in the case abortIncoming() was called that the incoming + // array will be empty. + assert(state.incoming.length === 0 || state.incoming[0] === req); - res.shouldKeepAlive = shouldKeepAlive; - DTRACE_HTTP_SERVER_REQUEST(req, socket); - LTTNG_HTTP_SERVER_REQUEST(req, socket); - COUNTER_HTTP_SERVER_REQUEST(); + state.incoming.shift(); - if (socket._httpMessage) { - // There are already pending outgoing res, append. - outgoing.push(res); - } else { - res.assignSocket(socket); + // if the user never called req.read(), and didn't pipe() or + // .resume() or .on('data'), then we call req._dump() so that the + // bytes will be pulled off the wire. + if (!req._consuming && !req._readableState.resumeScheduled) + req._dump(); + + res.detachSocket(socket); + + if (res._last) { + socket.destroySoon(); + } else { + // start sending the next message + var m = state.outgoing.shift(); + if (m) { + m.assignSocket(socket); } + } +} - // When we're finished writing the response, check if this is the last - // response, if so destroy the socket. - res.on('finish', resOnFinish); - function resOnFinish() { - // Usually the first incoming element should be our request. it may - // be that in the case abortIncoming() was called that the incoming - // array will be empty. - assert(incoming.length === 0 || incoming[0] === req); +// The following callback is issued after the headers have been read on a +// new message. In this callback we setup the response object and pass it +// to the user. +function parserOnIncoming(server, socket, state, req, keepAlive) { + state.incoming.push(req); + + // If the writable end isn't consuming, then stop reading + // so that we don't become overwhelmed by a flood of + // pipelined requests that may never be resolved. + if (!socket._paused) { + var needPause = socket._writableState.needDrain || + state.outgoingData >= socket._writableState.highWaterMark; + if (needPause) { + socket._paused = true; + // We also need to pause the parser, but don't do that until after + // the call to execute, because we may still be processing the last + // chunk. + socket.pause(); + } + } - incoming.shift(); + var res = new ServerResponse(req); + res._onPendingData = updateOutgoingData.bind(undefined, socket, state); - // if the user never called req.read(), and didn't pipe() or - // .resume() or .on('data'), then we call req._dump() so that the - // bytes will be pulled off the wire. - if (!req._consuming && !req._readableState.resumeScheduled) - req._dump(); + res.shouldKeepAlive = keepAlive; + DTRACE_HTTP_SERVER_REQUEST(req, socket); + LTTNG_HTTP_SERVER_REQUEST(req, socket); + COUNTER_HTTP_SERVER_REQUEST(); - res.detachSocket(socket); + if (socket._httpMessage) { + // There are already pending outgoing res, append. 
+ state.outgoing.push(res); + } else { + res.assignSocket(socket); + } - if (res._last) { - socket.destroySoon(); - } else { - // start sending the next message - var m = outgoing.shift(); - if (m) { - m.assignSocket(socket); - } - } - } + // When we're finished writing the response, check if this is the last + // response, if so destroy the socket. + var finish = + resOnFinish.bind(undefined, req, res, socket, state); + res.on('finish', finish); + + if (req.headers.expect !== undefined && + (req.httpVersionMajor === 1 && req.httpVersionMinor === 1)) { + if (continueExpression.test(req.headers.expect)) { + res._expect_continue = true; - if (req.headers.expect !== undefined && - (req.httpVersionMajor === 1 && req.httpVersionMinor === 1)) { - if (continueExpression.test(req.headers.expect)) { - res._expect_continue = true; - - if (self.listenerCount('checkContinue') > 0) { - self.emit('checkContinue', req, res); - } else { - res.writeContinue(); - self.emit('request', req, res); - } + if (server.listenerCount('checkContinue') > 0) { + server.emit('checkContinue', req, res); } else { - if (self.listenerCount('checkExpectation') > 0) { - self.emit('checkExpectation', req, res); - } else { - res.writeHead(417); - res.end(); - } + res.writeContinue(); + server.emit('request', req, res); } } else { - self.emit('request', req, res); + if (server.listenerCount('checkExpectation') > 0) { + server.emit('checkExpectation', req, res); + } else { + res.writeHead(417); + res.end(); + } } - return false; // Not a HEAD response. (Not even a response!) + } else { + server.emit('request', req, res); } + return false; // Not a HEAD response. (Not even a response!) } -exports._connectionListener = connectionListener; function onSocketResume() { // It may seem that the socket is resumed, but this is an enemy's trick to From 8a2a763f13c7a940c7188bfee10556a9a815bb99 Mon Sep 17 00:00:00 2001 From: Brian White Date: Sun, 18 Dec 2016 21:21:57 -0500 Subject: [PATCH 104/144] http: improve validation performance The new table-based lookups perform significantly better for the common cases (checking latin1 characters). PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_common.js | 100 ++++++++++++++++++++++++++------------------ 1 file changed, 59 insertions(+), 41 deletions(-) diff --git a/lib/_http_common.js b/lib/_http_common.js index b0767fd43ca1b8..2ce523fe6298bd 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -246,44 +246,44 @@ exports.httpSocketSetup = httpSocketSetup; * so take care when making changes to the implementation so that the source * code size does not exceed v8's default max_inlined_source_size setting. 
**/ -function isValidTokenChar(ch) { - if (ch >= 94 && ch <= 122) - return true; - if (ch >= 65 && ch <= 90) - return true; - if (ch === 45) - return true; - if (ch >= 48 && ch <= 57) - return true; - if (ch === 34 || ch === 40 || ch === 41 || ch === 44) +var validTokens = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0 - 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 + 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, // 32 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, // 48 - 63 + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, // 80 - 95 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, // 112 - 127 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 128 ... + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 // ... 255 +]; +function checkIsHttpToken(val) { + if (typeof val !== 'string' || val.length === 0) + return false; + if (!validTokens[val.charCodeAt(0)]) return false; - if (ch >= 33 && ch <= 46) + if (val.length < 2) return true; - if (ch === 124 || ch === 126) + if (!validTokens[val.charCodeAt(1)]) + return false; + if (val.length < 3) return true; - return false; -} -function checkIsHttpToken(val) { - if (typeof val !== 'string' || val.length === 0) + if (!validTokens[val.charCodeAt(2)]) return false; - if (!isValidTokenChar(val.charCodeAt(0))) + if (val.length < 4) + return true; + if (!validTokens[val.charCodeAt(3)]) return false; - const len = val.length; - if (len > 1) { - if (!isValidTokenChar(val.charCodeAt(1))) + for (var i = 4; i < val.length; ++i) { + if (!validTokens[val.charCodeAt(i)]) return false; - if (len > 2) { - if (!isValidTokenChar(val.charCodeAt(2))) - return false; - if (len > 3) { - if (!isValidTokenChar(val.charCodeAt(3))) - return false; - for (var i = 4; i < len; i++) { - if (!isValidTokenChar(val.charCodeAt(i))) - return false; - } - } - } } return true; } @@ -299,26 +299,44 @@ exports._checkIsHttpToken = checkIsHttpToken; * so take care when making changes to the implementation so that the source * code size does not exceed v8's default max_inlined_source_size setting. **/ +var validHdrChars = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, // 0 - 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 16 - 31 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 32 - 47 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 48 - 63 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 64 - 79 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 80 - 95 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 96 - 111 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, // 112 - 127 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 128 ... + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 // ... 
255 +]; function checkInvalidHeaderChar(val) { val += ''; if (val.length < 1) return false; - var c = val.charCodeAt(0); - if ((c <= 31 && c !== 9) || c > 255 || c === 127) + if (!validHdrChars[val.charCodeAt(0)]) return true; if (val.length < 2) return false; - c = val.charCodeAt(1); - if ((c <= 31 && c !== 9) || c > 255 || c === 127) + if (!validHdrChars[val.charCodeAt(1)]) return true; if (val.length < 3) return false; - c = val.charCodeAt(2); - if ((c <= 31 && c !== 9) || c > 255 || c === 127) + if (!validHdrChars[val.charCodeAt(2)]) + return true; + if (val.length < 4) + return false; + if (!validHdrChars[val.charCodeAt(3)]) return true; - for (var i = 3; i < val.length; ++i) { - c = val.charCodeAt(i); - if ((c <= 31 && c !== 9) || c > 255 || c === 127) + for (var i = 4; i < val.length; ++i) { + if (!validHdrChars[val.charCodeAt(i)]) return true; } return false; From aed5e2745117dad40c43bcb687d7dddc4906ae16 Mon Sep 17 00:00:00 2001 From: Brian White Date: Sun, 18 Dec 2016 21:25:31 -0500 Subject: [PATCH 105/144] lib: avoid recompilation of anonymous functions Since at least V8 5.4, using function.bind() is now fast enough to use to avoid recompiling/reoptimizing the same anonymous functions. These changes especially impact http servers. PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_outgoing.js | 7 ++++--- lib/_stream_writable.js | 35 +++++++++++++++++------------------ 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 8d134ecc501083..5a9e34bedd7903 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -545,6 +545,9 @@ OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { const crlf_buf = Buffer.from('\r\n'); +function onFinish(outmsg) { + outmsg.emit('finish'); +} OutgoingMessage.prototype.end = function end(data, encoding, callback) { if (typeof data === 'function') { @@ -592,9 +595,7 @@ OutgoingMessage.prototype.end = function end(data, encoding, callback) { if (typeof callback === 'function') this.once('finish', callback); - const finish = () => { - this.emit('finish'); - }; + var finish = onFinish.bind(undefined, this); var ret; if (this._hasBody && this.chunkedEncoding) { diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js index 06499fc947fe30..b20fe8d2ea91ed 100644 --- a/lib/_stream_writable.js +++ b/lib/_stream_writable.js @@ -86,9 +86,7 @@ function WritableState(options, stream) { this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) - this.onwrite = function(er) { - onwrite(stream, er); - }; + this.onwrite = onwrite.bind(undefined, stream); // the callback that the user supplies to write(chunk,encoding,cb) this.writecb = null; @@ -538,20 +536,21 @@ function endWritable(stream, state, cb) { function CorkedRequest(state) { this.next = null; this.entry = null; + this.finish = onCorkedFinish.bind(undefined, this, state); +} - this.finish = (err) => { - var entry = this.entry; - this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = this; - } else { - state.corkedRequestsFree = this; - } - }; +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; 
+ } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = corkReq; + } else { + state.corkedRequestsFree = corkReq; + } } From c8ad127abcf6558b3509da6e74c2a082b81a3b36 Mon Sep 17 00:00:00 2001 From: Brian White Date: Tue, 20 Dec 2016 02:48:18 -0500 Subject: [PATCH 106/144] http: extract validation functions PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_outgoing.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 5a9e34bedd7903..1e612316f4c7ee 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -7,6 +7,8 @@ const util = require('util'); const internalUtil = require('internal/util'); const Buffer = require('buffer').Buffer; const common = require('_http_common'); +const checkIsHttpToken = common._checkIsHttpToken; +const checkInvalidHeaderChar = common._checkInvalidHeaderChar; const CRLF = common.CRLF; const trfrEncChunkExpression = common.chunkExpression; @@ -312,11 +314,11 @@ function _storeHeader(firstLine, headers) { } function storeHeader(self, state, field, value) { - if (!common._checkIsHttpToken(field)) { + if (!checkIsHttpToken(field)) { throw new TypeError( 'Header name must be a valid HTTP Token ["' + field + '"]'); } - if (common._checkInvalidHeaderChar(value) === true) { + if (checkInvalidHeaderChar(value)) { debug('Header "%s" contains invalid characters', field); throw new TypeError('The header content contains invalid characters'); } @@ -350,14 +352,14 @@ function storeHeader(self, state, field, value) { OutgoingMessage.prototype.setHeader = function setHeader(name, value) { - if (!common._checkIsHttpToken(name)) + if (!checkIsHttpToken(name)) throw new TypeError( 'Header name must be a valid HTTP Token ["' + name + '"]'); if (value === undefined) throw new Error('"value" required in setHeader("' + name + '", value)'); if (this._header) throw new Error('Can\'t set headers after they are sent.'); - if (common._checkInvalidHeaderChar(value) === true) { + if (checkInvalidHeaderChar(value)) { debug('Header "%s" contains invalid characters', name); throw new TypeError('The header content contains invalid characters'); } @@ -530,11 +532,11 @@ OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { field = key; value = headers[key]; } - if (!common._checkIsHttpToken(field)) { + if (!checkIsHttpToken(field)) { throw new TypeError( 'Trailer name must be a valid HTTP Token ["' + field + '"]'); } - if (common._checkInvalidHeaderChar(value) === true) { + if (checkInvalidHeaderChar(value)) { debug('Trailer "%s" contains invalid characters', field); throw new TypeError('The trailer content contains invalid characters'); } From a760d707ad43cb62baf107332472b9865549c6f5 Mon Sep 17 00:00:00 2001 From: Brian White Date: Tue, 20 Dec 2016 02:50:34 -0500 Subject: [PATCH 107/144] http: simplify boolean checks PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_outgoing.js | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 1e612316f4c7ee..262b1d9f87e4d8 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -239,7 +239,7 @@ function _storeHeader(firstLine, headers) { this.upgrading = true; // Date header - if (this.sendDate === true && 
state.sentDateHeader === false) { + if (this.sendDate && !state.sentDateHeader) { state.messageHeader += 'Date: ' + utcDate() + CRLF; } @@ -255,8 +255,7 @@ function _storeHeader(firstLine, headers) { // of creating security liabilities, so suppress the zero chunk and force // the connection to close. var statusCode = this.statusCode; - if ((statusCode === 204 || statusCode === 304) && - this.chunkedEncoding === true) { + if ((statusCode === 204 || statusCode === 304) && this.chunkedEncoding) { debug(statusCode + ' response should not use chunked encoding,' + ' closing connection.'); this.chunkedEncoding = false; @@ -267,7 +266,7 @@ function _storeHeader(firstLine, headers) { if (this._removedHeader.connection) { this._last = true; this.shouldKeepAlive = false; - } else if (state.sentConnectionHeader === false) { + } else if (!state.sentConnectionHeader) { var shouldSendKeepAlive = this.shouldKeepAlive && (state.sentContentLengthHeader || this.useChunkedEncodingByDefault || @@ -280,8 +279,7 @@ function _storeHeader(firstLine, headers) { } } - if (state.sentContentLengthHeader === false && - state.sentTransferEncodingHeader === false) { + if (!state.sentContentLengthHeader && !state.sentTransferEncodingHeader) { if (!this._hasBody) { // Make sure we don't end the 0\r\n\r\n at the end of the message. this.chunkedEncoding = false; From 32b6bcdd837058eb318a7ae14204748b7377fa63 Mon Sep 17 00:00:00 2001 From: Brian White Date: Tue, 20 Dec 2016 02:53:47 -0500 Subject: [PATCH 108/144] http: optimize headers iteration This commit uses instanceof instead of Array.isArray() for faster type checking and avoids calling Object.keys() when the headers are stored as a 2D array instead of a plain object. PR-URL: https://github.com/nodejs/node/pull/6533 Reviewed-By: Matteo Collina Reviewed-By: James M Snell Reviewed-By: Fedor Indutny Reviewed-By: Benjamin Gruenbaum --- lib/_http_outgoing.js | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 262b1d9f87e4d8..67a8d6f8bb8233 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -209,23 +209,31 @@ function _storeHeader(firstLine, headers) { messageHeader: firstLine }; - if (headers) { - var keys = Object.keys(headers); - var isArray = Array.isArray(headers); - var field, value; - - for (var i = 0, l = keys.length; i < l; i++) { - var key = keys[i]; - if (isArray) { - field = headers[key][0]; - value = headers[key][1]; + var i; + var j; + var field; + var value; + if (headers instanceof Array) { + for (i = 0; i < headers.length; ++i) { + field = headers[i][0]; + value = headers[i][1]; + + if (value instanceof Array) { + for (j = 0; j < value.length; j++) { + storeHeader(this, state, field, value[j]); + } } else { - field = key; - value = headers[key]; + storeHeader(this, state, field, value); } + } + } else if (headers) { + var keys = Object.keys(headers); + for (i = 0; i < keys.length; ++i) { + field = keys[i]; + value = headers[field]; - if (Array.isArray(value)) { - for (var j = 0; j < value.length; j++) { + if (value instanceof Array) { + for (j = 0; j < value.length; j++) { storeHeader(this, state, field, value[j]); } } else { From 0a0c190db57754bf00292636960d8744f95f05db Mon Sep 17 00:00:00 2001 From: Fabrice Tatieze Date: Tue, 27 Dec 2016 18:18:58 -0800 Subject: [PATCH 109/144] test: use strictEqual in test-http-server PR-URL: https://github.com/nodejs/node/pull/10478 Reviewed-By: James M Snell Reviewed-By: Italo A. 
Casas Reviewed-By: Colin Ihrig --- test/parallel/test-http-server.js | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/test/parallel/test-http-server.js b/test/parallel/test-http-server.js index 49283d621cecda..1870bcc7170726 100644 --- a/test/parallel/test-http-server.js +++ b/test/parallel/test-http-server.js @@ -16,23 +16,23 @@ var server = http.createServer(function(req, res) { req.id = request_number++; if (req.id === 0) { - assert.equal('GET', req.method); - assert.equal('/hello', url.parse(req.url).pathname); - assert.equal('world', qs.parse(url.parse(req.url).query).hello); - assert.equal('b==ar', qs.parse(url.parse(req.url).query).foo); + assert.strictEqual('GET', req.method); + assert.strictEqual('/hello', url.parse(req.url).pathname); + assert.strictEqual('world', qs.parse(url.parse(req.url).query).hello); + assert.strictEqual('b==ar', qs.parse(url.parse(req.url).query).foo); } if (req.id === 1) { - assert.equal('POST', req.method); - assert.equal('/quit', url.parse(req.url).pathname); + assert.strictEqual('POST', req.method); + assert.strictEqual('/quit', url.parse(req.url).pathname); } if (req.id === 2) { - assert.equal('foo', req.headers['x-x']); + assert.strictEqual('foo', req.headers['x-x']); } if (req.id === 3) { - assert.equal('bar', req.headers['x-x']); + assert.strictEqual('bar', req.headers['x-x']); this.close(); } @@ -75,7 +75,7 @@ server.on('listening', function() { // you set server.httpAllowHalfOpen=true, which we've done // above. c.end(); - assert.equal(c.readyState, 'readOnly'); + assert.strictEqual(c.readyState, 'readOnly'); requests_sent += 2; } @@ -86,19 +86,19 @@ server.on('listening', function() { }); c.on('close', function() { - assert.equal(c.readyState, 'closed'); + assert.strictEqual(c.readyState, 'closed'); }); }); process.on('exit', function() { - assert.equal(4, request_number); - assert.equal(4, requests_sent); + assert.strictEqual(4, request_number); + assert.strictEqual(4, requests_sent); var hello = new RegExp('/hello'); - assert.equal(true, hello.exec(server_response) != null); + assert.notStrictEqual(null, hello.exec(server_response)); var quit = new RegExp('/quit'); - assert.equal(true, quit.exec(server_response) != null); + assert.notStrictEqual(null, quit.exec(server_response)); - assert.equal(true, client_got_eof); + assert.strictEqual(true, client_got_eof); }); From 80e798e3243136d3b63c0243e8d66a40aca06257 Mon Sep 17 00:00:00 2001 From: Sam Roberts Date: Thu, 22 Dec 2016 08:50:33 -0800 Subject: [PATCH 110/144] crypto: use CHECK_NE instead of ABORT or abort Use of abort() was added in 34febfbf4, and changed to ABORT() in 21826ef21ad, but conditional+ABORT() is better expressesed using a CHECK_xxx() macro. See: https://github.com/nodejs/node/pull/9409#discussion_r93575328 PR-URL: https://github.com/nodejs/node/pull/10413 Reviewed-By: Colin Ihrig Reviewed-By: Gibson Fahnestock Reviewed-By: Ben Noordhuis Reviewed-By: Anna Henningsen Reviewed-By: James M Snell --- src/node_crypto.cc | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/node_crypto.cc b/src/node_crypto.cc index 7b3bc406091494..8e1319269e8ed7 100644 --- a/src/node_crypto.cc +++ b/src/node_crypto.cc @@ -701,11 +701,8 @@ static X509_STORE* NewRootCertStore() { X509 *x509 = PEM_read_bio_X509(bp, nullptr, CryptoPemCallback, nullptr); BIO_free(bp); - if (x509 == nullptr) { - // Parse errors from the built-in roots are fatal. - ABORT(); - return nullptr; - } + // Parse errors from the built-in roots are fatal. 
+ CHECK_NE(x509, nullptr); root_certs_vector->push_back(x509); } From bce0013dd8a4794fe5370d3478c60695669e5217 Mon Sep 17 00:00:00 2001 From: "Steven R. Loomis" Date: Fri, 9 Dec 2016 12:44:31 -0800 Subject: [PATCH 111/144] deps: ICU 58.2 bump Bump ICU from 58.1 to 58.2 No feature changes. Bug fixes. * This commit contains the actual ICU source code and data. * CLDR 30.0.3 ( was 30.0.2) * Timezone 2016j ( was 2016g ) ICU Details: http://site.icu-project.org/download/58#TOC-Updates-in-ICU-58.2 PR-URL: https://github.com/nodejs/node/pull/10206 Reviewed-By: Michael Dawson Reviewed-By: Jeremiah Senkpiel Reviewed-By: James M Snell --- deps/icu-small/source/common/locmap.c | 2 +- deps/icu-small/source/common/ucasemap.cpp | 5 ++++- .../icu-small/source/common/unicode/uvernum.h | 6 +++--- deps/icu-small/source/common/ustrcase.cpp | 3 +++ deps/icu-small/source/data/in/icudt58l.dat | Bin 2680800 -> 2682224 bytes deps/icu-small/source/i18n/digitlst.cpp | 4 ++-- deps/icu-small/source/i18n/scriptset.cpp | 2 +- deps/icu-small/source/i18n/uspoof.cpp | 7 ------- 8 files changed, 14 insertions(+), 15 deletions(-) diff --git a/deps/icu-small/source/common/locmap.c b/deps/icu-small/source/common/locmap.c index 0d0aac42e15c19..1dba67a092a925 100644 --- a/deps/icu-small/source/common/locmap.c +++ b/deps/icu-small/source/common/locmap.c @@ -1022,7 +1022,7 @@ uprv_convertToPosix(uint32_t hostid, char *posixID, int32_t posixIDCapacity, UEr // GetLocaleInfo() maps such LCID to "ku". However, CLDR uses "ku" for // Northern Kurdish and "ckb" for Central Kurdish. For this reason, we cannot // use the Windows API to resolve locale ID for this specific case. - if (hostid & 0x3FF != 0x92) { + if ((hostid & 0x3FF) != 0x92) { int32_t tmpLen = 0; char locName[157]; /* ULOC_FULLNAME_CAPACITY */ diff --git a/deps/icu-small/source/common/ucasemap.cpp b/deps/icu-small/source/common/ucasemap.cpp index c0d56c28731d1f..0576a26ddd1c86 100644 --- a/deps/icu-small/source/common/ucasemap.cpp +++ b/deps/icu-small/source/common/ucasemap.cpp @@ -200,7 +200,7 @@ appendUChar(uint8_t *dest, int32_t destIndex, int32_t destCapacity, UChar c) { return -1; // integer overflow } int32_t limit=destIndex+length; - if(limitQnZZm2yg7iqkao{W7m|msDQnRo)sM zOXgFeEsAm`Grv-jto&Jir6$>Sa(<;F*>}rbwS-kIJ=STP=(MTObf?Yqu&`>*?uaxdb?ijx(pgHu$@@=QNCIF zo(j&ps@|}){d2RmUH^6i`wZ55*Uq<2)yI8cU!&>C73`f|;zVO^1K47C{ z<$X3XHVtgVG5@DWNu`wVbh1b5<@(z<+tsV$!5DkIrEGju*e4=?z#ED#OG+C9DzuDoHhtR>wWtslFdTirp`!i;WvG~;V(V3R3g z=2*M$qv(WB+>2d`_mXF$Cy135)daPQT1~B?)>hBPj|zzjFYLP&<_pORG4_z47F0`x zEQ%lHez$bkb3yzy*8V}z2(K~rK=HvidzJDn;+KaN4;mT&e!?{Ol=#D86@t3E|A;py zJPTXqKJLEn+2uXsedNv?{*BAnsJthzX}IyVx!5+&Uf#bcu%eXJXu2Pm^}s-vS=8yuC@m(4@-NtOUijJUr7hz>=7>HfVpsv7kzp z?g=AJPlM7z&m=5N_{&uyY4!wrf@ws;dXY52?)26781wwvlosAGaF{g{j3H|V<~A;H zZElE`6YK?QM0={acLkTSjx;Z|9uLlIonYwY{n2D1WuYK&@wHs)rNSZJ%56DIMJRr_)n)6826@#&$O4% z_f?=vk3g3bfgvjgb}}SziVp+Zd;Q&Seiw_fBAspNUPG_E=}Zz({#~ zMyH$$bS^MPPjMsDUMSzXz(V`LE)EAy^Q*uZroho;2Cn6HQQ|XuV!m9F#u%}I11TLi zkUoLUz7AaKwSoDa6a7B3C;12FHYt5=19R^n*V=aEE|JGrE|KTs2vwQN?2P{w2GXSX z6H_80=%*?XxsCO^X*bpx>9-l%r$o%nWo$n;*JZ|8YrD)i|J`nE_wGN&7r7?cgDV*W zR|!m@N?>54gE6Qfg$Ms`22Qh^m{6KMXJY@=CO(;D|1fA(q%pHqV*Vt1zLo<5hcO_} z*eIrL8ynR(#+X*$m}R8jwk+lwhq3&P<2>oNop%_c_T>;#LQ^98v(0W>|ELi9F0_Tj z7>6)SNr|{dRtudf;wRgS#q8!d^Cj-i^)uOB{W;h7tjw1vT2Hpe`12)xAMuFg?&_lm zW8>~>O2o-PzkhPQTmIkeCvtfC5+~Z{kloce9A?gT?U+oy#CB1a$nNT;sEbT0U*g4> zxL9L3F7|)7yB%vPPKVpE?*@3camSEv|elOM_=U&JgdOy-Q33(ZGC(BJEc8WSv?9NZV;}rK$zOz3dEy{zyj*Pt< z9qTarrhGMK@8lt6$F@WhnG%u6CAg887*(9KtHq;?!K2uw7u(p?USjtY`+GGSL>r9_ 
zqUW*F96B#>{f$mdI8ElzCb>EXPFLr^ePA5!cy9Zs(DAw7_20#$qC$6x_ov$Pg~$bF zDC$qOJ9yeLPKYtjm2B3OukYsY5u2GqKN5?l+M^4k2JV%`aqo`)IGugU@!Y?Yc|(8C zoyi=0N~ZW}s=Z``|Nq2tj@rqoH758Ir!Fe=r^qBWHistV?iV<~e$g{YbLhT({-27LpW6$y%$dQ)xPx)V_4_3-6XPtLU@&v&iNJL+27h<+tl;5l4qXv=wlEHF zG^ez`GCFV>j7wvj{wv1pBd&<+pWCbWdk1dU-aKxN^NcRA|u{dG74*?sj90QkIm6WOmq2O%6OG88@79mm2p2H=bBL%^n>- zIB?Pj2W}vz_;Q*(kN;iM1@<2mdLedopv!8y824IA#Q*EwdzxU}k0}u^*v6^82t2w< z@aQ_s9N%!l?CP7y3W4z|#I`2wYU@BB;|f(tN{I;P z)n$FUw{GC6vM4Vq*;>wH^9y!gLir-_iV@>hl_6|n{8y1aeR**2;84ETQX;m*njFf@ zq%DED8!s1^X)Mg3`%HyJ%Ncg3zclyd{-n}@$3`($dRacGd2^NI5cVfM3*1F3I4Kpm z*efj+0~b4|f6iV)IEmHRW`|NOFveK=-sQ0{T)WGIRaFLZ@r)fWpx-OnZ>p~X?N2mS zDWsVE$|h4v#B*l!kLkJNb56r1)90K9UKJQf=P-wb>VaFd5CgQ~?rE;Hi8;&_DWn|! zzoV+bxI5dC zDs9-Aad6X(+bD8+>@*H?lWAJuTAJv$jQzFubA(0I_JJe(f`*2H<%WR^Yh1(v97!+( zyr%>Qj$j`<>&4EOkRj~cIJKt8l!)>S-Yl}bBZxC-iVOXh@+J4{CdO^c5l>Ou zM*rt>8<&QI?ZsUd#&!8NF)Z-pTAt473{o~YJ#ZQR_xhgVvKaTsHCxU-@-nGp;ArnK z8Dqi&)lu9ts?ykMQXX(-D|0~KS}NxmM?SEP%kz+JW3`9eE2>hS2le$?+=TyWG%1%j zW#2L(ukYKyjpAfb3EaTHvX2Co&nOATvR~=N)73T_*4nm3-ywU2-eE<~Q_or+Poq34 z>v9;6vjktx^sYsqbl*VU-dTsaR~ zDl&=V`nqKL}hmFV;rg9b}Y{(F406~llXkL-Bal* zckOHMRl9>mKTk5o!)G_Q`B#Avz6xB|zFdnA46@eNA@_2Qu&-@-bbTI9Yi;$#quKWN zio6@75QFTt6bjryIWtO$*u#K++nxwFx9x6=TQm^%Vht)W`ff|5sCO%iG3pcXP-In;68KmDfBj#5w+HT9Q;u~Qv(9|8dB(VRS?tSUL zJ#Y})1NTDCuHWWZ?SLjW_}t^L6f-vmBpuX?D1AB8(8AS(m8f-(mhV|ABpz@H`^Y%Jmx0+lbfa! z1A5g?;+HwR9ksYpBL3o_Z#n-89P}-xs>HfP{<-!t@r~Hm7d&BD$y{ul#_4~z@sPRp z#Nux}iZ_qRK65_YS#*B<+Xa+D(B1SW?BcWyVg$|re$cKX$500JC{XG z*LEf?)K+NqVjj5Oy4Gsjv|ZYMZEMsK?MT#|nA6%-?Re5}+9R!*X^d%sX}!tj)=fuD z7fe@8`Q2sR1>!=?yTjwmMa>n>>3(;rxj;lS^LH^=_PEt)mA zwU(!ar@W(yqjOS`=+7LJ5{1KSEod!m-S4Pq^~6=THn3KVUhKKyXkmTe_}kIR+RJ*{ zGr&6BI?+1S`i1pF=R)f$>qcv5=V({k=&PQ)o*~Y0PT|~T{oJRd&cGK~f!)}LBe;s| ze!kqquegKX@fx9dmDJpDA^}P8A|FcOJ(NXxR75ZI#Q+S(FpPvBV=x|>n2f2Ij#=;v zzRbfyEWuY;g>~47E!d8o*n|Bzgd;eH?{ErdaUMV73VwzwpORV{W$_^@<0Di@E!54Y z>==u*X^!P%aj~F0gm0@`UmkUf>npK*_J9nqWmR!jKEO z5e*08;etm@S!mzjUrD(J>#z}9upK+G2m5gdM{o?^;S|o|Jbr}nqrfZqm1#y^m3fUc zex{jC-ohQ+htcP;%%}Mkf1u%+Yx!pTa6OG7NSY+eVQN z7&}Uk>5}P|>6MwcfInxW{4^Anjm2dq%Pb``g*0|hhAb!BD$2IXWHs4VQ)X?Mb!FC< znJTl9k$whDlMPK}Hj~*xX1dJQGTV~IMB0;`Wm|WdJ!STm*;i(NnFD1GCXJu!`iD}C zLS~*k-%T9Wr;3{M=gELmt3k9K*>1 zTz_MLGc26PB^ZNWC9`2{bc?)$`*@6J;78HQOY$|8g4}4ZAq;jz!MOPxWCGmC3uEkp zWKkFsO)kjwH;PiSA%!$1RE8{vin6_`%xW^L%d9E0w#>RR>&r|fje}@JHbo1xMtd0h z?d+#8X4;kPiM|*JW93lNST~#;jWL)2W8EZ~Su&@R#?eh9XUVoXvTYu@NVYANxm>1y zg)CN)#sF)`^|IbfZkP2gavzMHACh@k=24l)NaO6CAWz|(Y`;hvV_zb#%C_vB^eZ=I z!!6R-_%?YD4`FQdlza|j$FF3*mibmDZ)F4PHJN6aR?-;5Mux!{-%dus5m@H&mq6i0 zUKpplAXya2NP#gxS+W9*NmM3{fvS?#QClw8CmW$DjD9UhW7~AHEsSkCl3mdgMxVZZ z3ZqkhaxjdWWjHw+V=w`eVDy~$&8WdAdMZxkuG>;dw$XwyC7Lq*2!dwtjm%W3d`sJ z$}CiaF@c(-aUaws>!T5x!WgUt*&0T__A)!l>@2gZ%W(r zVO;moVoSmsmX`WricMuRcv zbMlp3`IfZOrWE1GU_%(}h=Q@5LuMRlY@a~7Wm{gE`Q^HTWKr3cEVEP*`TUVoSmsli&tyK&Nx$+^HoPK@oxLWNqB(<^ zNn>JGG6WH_JvV9ejUpYgP9WVdCXrXR$=oAzA8E|^0C^b4Wcx{( zr(~WXjh&q%FUtA~c@4()H_6+$2V>_CWj@YHzw%TzJd^oc=1ZBcWWJX9Rwf_L2L{w+ znq^vL+GK{v43il_8WXmYQN`rr-yv7V$xM*xlIfP|m6=y&ewl@37L{3CX0psuGE-!h zky+MHVa%o+Sy47tm2K6?nzF5~Y^zT;l5I_8TQf2p#{JTk?1--D31jTOt${vjd3=U+p!D#V2pE!H13JR z^Y3wkHbjY>@nJ&_py_d|7!m_=%%w(CRNMnL2WLZ=|WfKC$aAvoV(~UPJG&$suE@M9^BQSvluh1}ZFgke zlX+j}Lz$0dJ|)-a&JA{5^6G7iI@*#YQ{8+=@c^{W^_zu%*>cC zV>ZU@ia8o{A?A9_{g}UEf?{K0J+aBLAI8>+Z64b_c4X{lv0ub4iCq`FJNDbyA7Zm( zpYj=0xFgn4z){BWu_M*d#?jL;)G^L6gHOTMI(9jZJFYtJIQR(2nai2rcNTD#byjmW za<+5!clw<(oQs_soClm|oVT3MoR&C8T#2}vW*b}e^pcO73z9Y^9ZWitbS>#|lHv|`yWGj{%I-9GJ9mHgSoduAS8m;X)P2eQTLt%@Zq1Y1 z9n 
zEWuK&z-p|+25iC>?8IIiz)_sWdHjgW_z6Gb27bY>;@}p0tp5q+3;cz*P`pZp1wjZ# zC?a4-9z-J+PQ=3n5Aq^E3ZgKIA{nLd9^S_XD3400ifUfPpHZEnCTgQD>LV46@d;X@ zEjpkRx}g{Ppg#sjgW?>XoeO@M{Bf02XsLX z^g(|N#9$1=NQ_1X#$r4sVlt+J&jm7OL0~=>`ndi}D3;VSW^BcF>;NAq zWbDO3e1oGnj_+^^XK)VZaS@lm2M-z7kd0fojk~z-i&ZinQT%~F@dAJ0Z}DWiJ-%dk zUakQ6Tp}YHvEcKGjCk<*M1}`GIe5%qL?3IE5GHBbw6P!A2z5NT+NW@v$Qv_@OB zM@Muuc}K=f%*K51QAfrSEX8uH z03Uf|@R3Kx2Jn$b#y05SV~>p8*oOl+f^TsgeDslV7C+zu_~;{pk3KT4;u>z^R^ASr zV2V3XLy0X@HgJzAE^113==E}LNG!Rj$DXD9z-J+PQ;6|JM0DgB`H(zK0ZKs zR0JQpWK_k+r~y8D$*6;RNJS&0p(&c71zMpE+MxqFp$klkTRC8T>AWrm}4@J-6wxFe3waQG}wRx8;WeHlNxI;z5nQWJbG8#@XWu~J>xR)|73ZkgEG1F1R@;+sG5jo4z zGWsLRk5LPCP;ZTr-B5fw%TXh;4T~Mn30=@lzdXzFz8X1*dKgB)kFg?gwj(L>3rc~x zn2*Jx-fTx~@Hz(E#NrmwZ#KLBiSoMma<-#EWohfsy1hw~pw)7P9j1S1sT zh!o=m1Gp*kifw`eElOD&rNk5A=nzw$vJ$G`Bh)~huSNZpju??KhjY}NhE`(k98O9n z%5La|KH|X~M|>xqx`D8AnbK4HwdLCZ!OcE_8hN!D7l~_zJ7>HP&Ghwm`>D?8aUk z#5Xu1#xLT^WmDe5ZE#;jAag&Z^)Uq^G5YbBWl~N;7N%hq=3oI9VF{LExw!i!N3)Z1FZPRkOIbWdc>k(|YcFQDvENBFbo5h>6(x;wGUex@ z-B&zL1?7B(SWJG2W#XXGV*^cF#FMW$#ND*+#X(VQ1rM$hlqYcp=f$Th9F>BvQ{Dp4 zWh)$A5}#21fj{vAf8!1Qfhy{)Wcb`0xZwJzm5u>wJr}hbUgSqX6h?8BLJG>D94ewR zs-haIqc-ZIJ{pM+Ry*=**m5a$ z1t|-oD2gK)DJUySZg9jpYf#og9W+2=G(mIGZi6F_zY}FQ^g1eGV+~^2ZKj=f<4+Ag=!!QD)@F~V(JSO5ZOvV&U7wTq5p5TR)i#I9R z%Y@2h)n>;(sTV0P;R>$e8nST953+-uff0FaceMdR=;cs2IDXRnV7^D zqVGOV*vOrn(m6COFm@g-*6ehY3180gDy*a1Ch=e=Cvg{xdvE}UMU`ES#?hxKf4~L& zNY9_hpT+83%=udp*zhRj2~lsKqe1vN%8R&yYhv|2uJ>Ka2Y4bL z?Bi^`qI?T&o04r6RrWi+Hs{*LU3`DPBf;sR@*y7zpb(1Cv4qHXfN9lau{P?WJ{loS zOfgoqr|gKX=*||s$-d&x1H532q#TVgG*2KiMU#U(EN4;9#ym0oAP2FGl6OxB9W|mh zP;S9C=xnr`6x00N63K_SC62IoT+}+mv7M#7fXiaaAx8)2P0CxigZp@h#|-dH1b@RW z)a{DfB0l(ryC9r064Be0Y$q9yM6voC-gOnCEP@g!iPCrv@8biye<(gU?D*VVk416p zup=S74OKf@>|D`iJ>|-HgX{n zc@PaJ5{2iO!xfxeg1R)s0l^7xdyEs_kVW1w9CPeXX+_x%9ncBg&WNk-dH6J?Y=LyN7ORb>Zj?RHX9o{Q z@#je%BqLcIjWL)YYJE@BbjmO2F_&B@7Jl#eBzy(s8f>6>i>{t>lvf+>XZaw$!4Z6m zlQ@NQIFC#C8P{bj!806Xqn%22 zQx=j^%Xo}^_ycDwg3^vCIK=cHIMqJN0w{zc zMpLYK_JgBfco~+$%wE4Yem+`=8)$74Lj zGrYtryoI)#HwU|Ub3lf{jwr;4k1z2Qol2R8W=I!bUgFetq3j{fUgB~Lpd5l>BK$Jv za17-*WFkwnyX+X^5tQ?>5MN>$zQRhZX1D9aUzZ&-gZHwyAK!@SR~&gFPE(%61$tcG zEvC-k8>W+29O=&cG(N^tJjYACrjsJ7{KV99?cr%c^!tgc9#5HwBzTZd9RGn-$Nc0{jND0NB%^ajhncQJL1AMM-BTk%9nUeZ)GndPUE#i zRKCvL7tCTPav@R-y3S!IP`breMp=-usJL+5(I6^?vMkD>;$9`YDk(N*^L9Ti+fgmL zE{my1LsK*Z-&JRG*xf06qd!{>CWngf8xEg!4CT1JIX{!AdxNLiEE=YXPB(Zo&7oWX zz7@H_G*(lt6PIsrvbIs`*e#-OI!?cLnDQu2;3UrA9M0n+uHY)N+4n8-E*=QyFUC8b zeZ1pAFhch!*}UWVg>TQ|SxjQlCw3Z(#aJxGVi}>{VxvkdR>4O!)gbGL<}`;lq5K3b z(MGJh#UXU3>`kx!`jcCZT&j5l%d>AgYDO-jaU~01V;we#R<}8@i*RE#pp)|BnVI-~4P$&Y!Aaxmp^5&pn< zx}luFh?B^vqTK_Yt_0nS&5D?N9J;D=nQgDf7waZ&G~&}Br+kd3;>km9q`xWOKt0Gi6j=0Hj~pe`_adod5RW9}MIjVNX}o_>$u3V; z#>c3Q24dY~hU`w+6MfMi1BK;@WAcX?l;bb~naILaOv5Zjn?ufn@!|fz|EDS=W7g{5 zKXH7k`FHVC*86w?6F-#9g+wHy5}KeFw&54NKr}zWD~K{^fOZ&w37C(saSTro#ZR~@ zpe_0%6Z3H#_tq+Yw~HSORYp1nVH{TB8=S)pyhJEJHSysiG)FfK$3)D;O6WL(OSlgU zKZS@xcPz(o+{L$iBVV0wdfRXE^D0Ae79o7QSQ3>`AMLOdr*Iul5z4n!@hE`OsDwIb zhas4XCD?*vxQ@S(hi^@aqbeF=IKIGYKVSCa6mH^A*m!?m63x&bSy+Uxu@~RtXFNwR zZ-bLi3EeOchj9f@5yTtH_t6~Vu^P`%f;VB6;cv*7_85doSd1Oec-NDT#n_3XxQJiy zCxUrPkbn}XiguU<2QRd3(Fdb24IA(i%)C&=q7cfX(H^dUFA6`7;65~-$D@%K$*6|5 z7>-QL!g6fFemp`Ro<9qs9O|J124fSl@dOG_I3>{yi?9c;_SfLqlgDa#)I$q&Mt_XP zG_1rfe2-grj$j^DX&8zH*r;E4;b>s@r+mXpDJq~cs-Y(8A{A+9hIF(=M|4F`lzYy< z^C1UfC`KX!>uKI?vxt3xhwqghN-~bNe7*660&f^lUA{)1G zkb&-y_wg9d@De}M{+d({a|!W)Wg8g=JEGt~!ePJSc2nd$Y^1V<}Q^>NYfXb+b zny8CZq@fwo(H0%i6+O`x12GgMk%4i@L>8uD7UuZ*GMAk#AeUe{R$(nRVk`Eteh0Y+ z2XGk2a1v*59+z+x*|>!}xR1wphL?B^;IC5*HDgfYG6Yc?1+K` 
z32-AX3Zf{Ik%F?QfXb+bny8CZq@fwo(H0%i6+O`x1C0(tF%lUVhfHK)8fIY*7GMdM zV-?n7Ber4(_TT^x$nac$jPpIYyV#@(Gh^%pUuyR(4*=g{9nYuJxR9FGVO z|0{Gg|!MF7%?GiU3gMN zhX`M;31Pi*HHvuT58D#pzlOw)of)AMJ!O)D*w_!g;WQ2ALs~6TF)Ek-|RxEsC$b*Ohp$o%58+^PB>2Y#tCD#jC2Jf>VsjhMzU?P7YyjEwm# zM#Lb3r9D{K*w0eG{*wRD#v!m5yu(FHOB*o;tX@f@jI2J{m#nH zI?krfF3thYPo2}8Upm)24?0gduQ?w%|8eGt^Tnma)re~y*Dr2N+!t}H;`YUzj?0dF z9H+)d#TSgP5MMXGs4vCWD!voHS~)y^eEhWddGX8R*T?UOKNNp5{$hM~{Jr=;XKk;1RwZz{O|4IyTIb4NZWnCY;nz}l;`nmkB$*wuB)h^xjjq9}Q zXV)XwKdy)*cT(}B@=0}*S|)W%8lE&MNhGaE+M4uD()pyD{Hmqqw!8DXOSwOCr~2Kk z-F@66+>_l4++VvZ_^SDKxxeK%E+4qxxWhbgo}!)#9^+q@_wtm8X}#r~ z7Gkb$er>s=^;Nr?Mq7rP?B+VJf+pF@nqF(YO%H<tirLax0wXmEqm$9|CUbD1T2dlYlLoJgj` zwXQl(4b#SHOH|Et&NfGxq^(tL)`7vxd1}sWxn$Fsdn_|r6%;~1O<}zA@DZ={PcGOlaxGaAW&>dVc_?~U4 zRzcgxhrf?)5y4aaYNooz`r4)iyKE=af!4!ng87mq%39RAN3jLFtx>^o)@*B4Ex)yC za82!&<(V26Twlu{yhE)Wd_Cm1kW$v=)-=s(D;nIE4C3$&|hTdjq5QT-

hcE#N1u2tk$#@52|XpXda`^GVc%BWl6K-wKY;(*orFkZEjPr zvM0Ea`GBQz&|q_4^U9D?%HW{${F%IOL+S?AwpO!-Sx#EY1&uVHvM#dbH+2l^8`M3h zjOD6%j9IbRgOV*ro-mIII%R@WQ3FpsS;8% zq(Mm2kX9ibLwbaq4Yr5GhPXoVg%l0B6ns7ScJPDXXTg65YaziQsxq8^N?$OjxY-+2 z%5*2FnVJyP-7G>TSt^<`%}-6YO*2eW&HZgV%+-UY*_s8tv|bH5W$I~}V7h0SW3p=F zEUkkw{MJLJX_lg<{+7+=MnTVls;eV|7T8u3%$=a$Eo<-svQ8$$GuIl*ztG3y-bV=XKAjycV?$TZs4 z)U?sMBdD_ZmTjx;rg@ENfn}*>yLFc>%R0lF*43JB%~s4ox2+pZhphLsbCx~UhvuHP z?WXm%8NqATs7f&<({B7T{+7y2ne!s~O9F~pX{dZ>?ZJy=FZ6-@O}91rJ}|>GBS;(6M7L)0rxn8{eOhKr?J;-9 zbQu>lUeEoRr;#c;Px2JhJmd7)lROoy5vQB9ZC|OwxehNn)a;TmNZgv{nPxlH?d$Fz z4A?qUA35ElS+v%j#OT?c!Q%OBPeI)+JmWM=*Ur~Fo5kw6o?pbGd7d*`^X_`@`JObZ z+EX0-((^zZSn4^cw$LXp^Eg#~#d6OfllilS4Ht>7t30j6l~tZ<>SK|(+OtJJx7w3o z($m&?ZkhFRn?2W5ZO0(pv6T+yaY1%|?xY_+>KU$T?{yIsk9i*J z$;Um(CQGG;sSO*6b|*c5iS+M18%4w^&p+~XMNdan+mxyI{n0Z{)rt%f z&n|fu*~X`?YS^dIv^0J8WlvQ#B7J%*{_Ia@-ba<^julU@c%JIFfAXZOYP??MXU{*H zUNxJ2YRe|*$8LJ~%Z%rK@gyf0+jLR-D7}>~f$dt9ZCR;vwNiCjHF(=dcyD={X*;@z z;kP^o++U<=P0lud)}mv}-@5ebzPIPOo=GF7jF>;-%!ro$H)FmXE1Lc4*{B`rA#Arj zr9{Qso@;v1-#n94ZTWDq=ZAg79(j&wL38xBk3E^1R-(Cn z=c&h`X@k;c|LHlWX6a}D^!P2BGgYMi?Wrr){_RQ7>epX+>~D{%ofxSfdu?2|qD}Ny zZ#->P(e@utPjTuW&tP@z7aw{5ws?Cq?$J)Kq8952Sbp z25CE+ij)f8E4uwdZ%>ODUDfN=4t3O*RP`QHIMl^^Ok{QSKGsKf^SU+r)sf{#?;BTP!urgilOiT3Wa(>qc$=wOlVQ5n z%Uf2{EA;X5hn4I1^|~}|?;^cte{XJ0E9@7e2YNSa-?kUYgS@vy-of7W<}zuiY5KXr z-m04Od{g6O^j6gWJ6%I7cdx&+=?T5qFzwG zyftm&Xr_0Ep7EJ?phbwBw4Mt^%oOi(adL{c?#j5SUQN6Cg@~KxU8N;vi9^#E zB4WBX#T5R!XIAfqeVWX8HFNT;SW$15cd|Bdj<__-+g9s5U6lR8+f`rwh4+rC#kJS7 zgmTM%5WVczUY;|~ikWM@`Se|Dz11{x|An;| ziMQ*$qs7z>-rjo1M(@;I`n>(#J?3yV%Q)ZxN>}Bs(oJu8*!!Vbd)r>0ddyo{6=#ln z)6}EFcY;Ucxe--IHq#4y=Uu7kXTJB&(e&NLIcW$#e4$o|<|T{}NU&wtHZPSwVKrnkNB zEo(K`N>5K0bAI(M(97QTrkb>JgY?Y1-nu4n@xJ$pHl~-@_`rKg=O34JG==^=_uAZo z^BT?TJ#VD``x9>wRWJC5_nt}ntc%F~!n;%)ec`>MZ+z*^uO*f0Kd*n$0iO?SFlg1# z2ct&#zsdM#%y(ns#y=WgWk>zI^CuoR#hX1DcVex_H8?W{edo`cX{GaJ* zCf^vdekaIxRn;!n*AEB#(o}VZ9vkZWTGJ1Q`(CQ*GJSV0U!G7cti66J&UeqG?{fLV zOlntg#N%6R%`?e4Y29Se+3RZ|&Ut->#apj$g?41NzTD?Ct8piK{ne{??+^N1?;G4N zvS0IlR|bq4`h3KLjJludhw}O=1`DmYFWLHj|8o7yWQbD5eQWe<#eI2I-J0z4s_Fv0 zKuMo7NYpRuE2!%Fpt3%X*;Zqsf8x@KmP~zK1z%xRzx1K6D=$=)eO6WdPSmL4`$oIc zMTAuKbr$`r`j)8IMC3=lByCS8J^3S_*`$}@InSiGsO1Y$wfQadj;$rTN;cyxq`eHSr|}X|dzQ;dEbF z@iyJ}N`KYLciW;}PS>+L_^z3?xYlA{SKm|pP&eOA&HkpnaTSJh#}DFR-c8Y$^z_x& ztXoHgj#m8Qac^Im-mH%=uci%aC`R`4xwHub^m+Y!A!fCazHE?hdVJi;37T6~vXt9* z&AhwJQXb0uX%qW>onJ$?=nFVDQlUYjU`!XxYtP)6i4|YpW_Q)7uH+E$jW}i8=G_FSj2ebEM2qWloSeS>`mE zU&x#%bFs|jfpnR+$>q~B&y(-w^MXG;*>-jXV?tRXnm zE;CwYoXjMdd1V%oSzKmmnPp{u7)Y0?zNk0Dm#;^bGEA-*Epx2QOqo+;&X6f&E|9rA zkS^0|xxAkI*8#Q-zHJH|Uw5%?rZ3jtm*ac4+PEHzj^`ZyV0j(3%V7@6JQ_%s>AcL# zGOx+}Mdlru56E}pbAH?2_#)?ePt7^6e$!Lg@rBISGS&OWiFTQ+fy`1OWac5?4QNbw zc|y*Fm&yxOK=!LGvw_SsnayRk4rI|RrGsqfDzlf&{xXNi9C=?noW&h{;9qCVIHGrV zFz*svU-)7|PRcX4>%OSSa^ds;>TgWAQ*kwL0j|h?=jHg9?~Bp&Pq_6z{a>`n>3@%Q z(RG$DL7e=;XRGz>f4ZI8oYU=<9QKYJ_JPc&GGEAiEmP&63vnNWW#&BYtPgVT?@*a` znb9)iWG0dCE`xCv2ENVNy-SXhH_*>zDlhvtlKF|ubeZjP(yw%u4LxM`l{rY}aG8FY z<78&ZoE}J*=_|RsQ|3OI-^e^B^Lv@+0_o3Eev}PYW!{i^TjuXFpUeC^kSbU1({d=)!(?L>&qI~)MZ)}=G`&jSXD);j?uw9m7dX#gfZ2Su^Uu?Mp|FiiAd2?=|qjIx(vioA0 z%VlnRBr4A3rQ*EocR;ouk$FPqX_@C`UY2=7W|7A^(@B>3p3HJGE6J=Ovo86s!wqd) zG$Uui&ojj}CKU2oPN?By@s{N{^Jh7asWx(ejxxJF7RBdr0Cu^4h-@Dz^HZ4NU3?KuZnPEPd7|Ezzi=B%$O z!sq*9YuBXPzc<+xflG;_|_Xa<1dxtKal;O%6uX7wM_HV|DJx^(46fz%k9HhZ@hDflo=y4 zJ|{J$s8~1OSHNFKwx^Q+J{`Ls<&1Yqj@MH5>makM%w974%N#D#PyTzn%FA-byZPV8 zE#@!u1-G~(cQQ$C{JG3oGUv)%By*X}RWjGf+$?jK%>7Tr+l9UuF>ry;9$YUx=Lso9 z6kEi5Is2slzCVvd=DZb(q1`xfXXF?cWL}YZUFI#B_hmkl`IpSMGEHI~yZazn4jcMM 
z&ULZNjFuTEGf8FvnMK7LHV>&LH&6LPxEA|jn%9-f6=b(6GONq1BeSv0W-?pJY)}6C z^5jj*xjgN~l*PVSXIF;*_v-ueb5{4}AAGWU;Qy?iJuGK6j~!O~GyZ4wo0>VRC&)>s z{+V+Qn#gP+vyIG-GP}v_Cv%w0Nisi|IZNi;oD5YK$%Ykw>Qk5aj#~X^_(tO#e&-vW z^W;Tb#Y>j|##`eXA>Pzqqb-;WLpJqI+`?_#!9Co^Lp;V)JVSN**Fs%1z;oI@AzPr; zJ+nWxEky@(#%oq~BYQ&Q4O$=4Mus2^5wIgSq7VZI;*bCrR8a#Q}VS zqd1O}_x$EmFCFsYG7VR89XIhS3e%p9JJi49A)cTVZGVxk@eefKjay(t3hTcvlzqjI7~n$CLs$mFblIW2lKE1q4&-0QWsGy!3sEdcN`BF zJjjbxv==2ypcLLiS(HabR6#Y=z#96l#d>VSW{B6Td=CGotUb)qIC3H;VG2&rHjSK# z*_ew3Sd69k3ajun&d_%~xd~gL;~Z`K$wN4TWB3lIa2A(Xe{G`np*x%6Chp)N9^)xq z;w=>8Z5hn4!iEso@&8r!*5OfHZ`}UOZfrN%&1RELvT@n$3?#S~5AF$W0fH9~4n=~y z1o`&h+TvOYgB2;Iv_)HrQ%G7U4#A;7`Q1Z8;rd<@ zA&!y@3UCFS1Vx}Yl!CJ0CRPPo4QfIis1GTi5-SKvP#B6raVQC;p)8bx3NVHIO4us! zCFKRM_&&SVG72l9DuMObO|TWV!%nD#x8)yd|pz}l4UpERppf_Z|02mCzU<72sSeO8lVH(VY*)R_T9H;S64Gzzr&F zAy5#T1cjj}6o-;f8p=W?r~);iK3te2{ak2qzR zDENUt*uepz5Drlg3odBN0+O)Ju`S^<%B`_!P?K^yY-i{SpF+3(A!a0Nsa0#x!5VWh0=VN)~NDiNa~2I9a4w+XtjkFYAXAS6Ly@Zp-W zB(^k^g>p~L?g4aM;5Vn4&CVjDwKXbvqQHIqPV3Te;|(xDq#59kdU&=qX} zb}$Ts5un!_3lm^6OoN#)8|Hz4g|Gyc!zx$}>tG{nfo~vl2M@boFYJdy@B?E`~M&cLt!|Kf-x{2 zCc#vg0a-8?zJvv^7?#0G_!`#22G|VS;9JmO4}1p);V}FNKf`f21;1tTa277WWw;7C za2;;K9k>s<@C2U2D|iF%z`!klC-{IbSRnv{AOyl75@H}8+)w~|u}$@d%%MDtg7J_< zpa>L)QcxDkLnTOt>QD>nLIapeo#`+WvS1F(gD;^E@%h-n*oD|p*fB63Cc$E~rBe-M zVwO`_317h~0;{oJ(u5-Gip(v-*FuW)EV6$6UnVKNr$tI118a$Ic&0u7Hh${;$OVqP z57N%%!#|50d5q2bNUn5NTe8x%s+_3sn>$#{|HHLb+`8bhi53@* zhpS3NjFc=5;C=rNOv9GUXJdykbzTSFeUhX?SA?w^^ZU-san&>wy&|eybrrQVA*d$| z`6v{d9!hmWR?$@!@{=t_lz(}`ocCF@7ueI?GIc-}O9@BrLNO|mPBcBr+9=mSHL&WSSuAcUq&QE)p zY4URl>r+=jxw-i4sjG!tO3Z%ha`@6$41l391!lquSPxs^TR01s;3nLINAMh81D|(G^#n5n zKrn>IWJ;+q6be9LC;{c52GoT{&>Y%8I&_0x&<_T~a2O2}U^@H{7Q!<43bw$vup9Qn z5%?W$LtHE)p2tHi%hh1}}+ed2Lr5k2|B$h9C2YEM^ z^cIrE-sSEhj(SpS-I1E`SWBwPV;!lwc(>fG7^^Y&bo>f;CvEx)x7$nZA-1n^m+)?y z_t7pbXN`M+07r}t!oQ<4QS&s z54$@iW)3yHH0Ls#*HgnwLtkrCpPwQ}S>Kq=zM;Nu-;%yneUIBOM@EKp@Oc^5$7hCh zxKBvj6?>}hJ^LHGUtl}mNilo97F#0%d-)~>PWCZ}ANBgeHzItp@0Y$``F`WO!CE+^ zLf{^2{lJ61g~K02RtZlF>=pR4^?yDi0$2Jp4zC~7;u zv!_Rm^xx-mIA%c9y7=k-^@A4qrv*)n@`^Yb(km!5V!i)@sPi!wLPi7?jMx};JLEvr zZvTHmrU(7(vp8sod#>B8Yz+Fz|8C50QTu{gyAH(PhH|Y9*8^d|F^$y=*y_% zLBGdEgcb@t8($&fc3h>gVVbJ2&*J&?O1~#`TXV7Ck$83vJU)ExbE&wQU+~*yGds7NmU)kiK4_}%2zRDArp5Mh9JOUdCplL|_jB!xuIPMV z{yF+ebRpNn=!VYr&Jm6b=ih#Ym}RlsW1sp>cPw@U#&2|taXMnum~%EY{*6I0Zt$Mt z)VvRSdl-F;eny)ybDsTzVV~oM?Uw7Q?YQG;tTEtn>Q$15WelSOS_&e9D7)?0c7Cx(p(4mY!Z3yBNqqP@>v;{7P&Fl7+2N2J?w|DfiXV@{2g{SpigGx^#IkHV%``0Bw%!8Y3tdr zSCMJv?&eDN5T6`l509qCmc}$=7wcg2mEdva>Gmx1VtZrj z67xp;dh*FzHL#n*(sLSTQ>wP+WTI-u#(G za_0X>=i4rZBZdTNn{1X6_&>^jg0w+4%8#Tu2D5Zr{#Lfj-R)*^;F;TzSxwq6ML-8> zf>eg-TiZy5`L8dXqr8L27XHl=>C$Tc56i2VBi2ktDMON>8YTTdLDJ;H(m}a8i(M`? zF?2vXAqi^#K``mc$%fDa-t~q!X25}M%v7hDzcn%Qb$9glq1)X{5&2= zvGN`CYSKpP-j{33*X10>sE^?(nH8nyvLvS&TFcj@CGY*S@>ate;|(%)NsHx*Qc?ML z+Dgn{YTmLZu)@3BXG%}xwQ>zvk@v{c4b8=|7jED1`aJvD(2NgJ{ch;|zFM%P|92CQ zUbr2NI!n>~L|(KMCl!+p7#d6S4V~n#SnDmNL;TXdEVYvw6KlqQOUiDh*$guT8C0nf z({08P`9>-szmZj>0?nI9^EJ-Y%9S~|#-!GH(qpf-A?&l#q-nwl7 zTz};b5+1MJyq@;fy-SW2oBnlI%uJT6%eCYI!F)9lrh$M}um$$QQ8)wF;4ZuXV+cQo z0^y)Q38)P9papb-zAzjn!CY7d8$g4@a0)K_@Ng5Jh~DqqCHz{+5`)VeGIRc(BMx3t zLdC9k?g&ex zrLt_PiQgZFppDaHlM-kM{*b=_{V}{C<{vH!`YNLW;=>ssC<4;8H}N_orIs9Rt=ksV@%MX6;vgnJbCXYq(c%T#^b zn25m>%p$?6q*%_9@h99Q>$VtURcbk2;l9I_BlvD8<4{%~5Alap`O9KKvO$Rm$yPy} z@K;_~>f`o-0Z1douQp{?LM;y|{Wt?N1Nncjh|p53KCtB>6}oB@0vMFoo#=bOH&V(D zjAU9lte)oq6`+f@J5cdA1XM$>1@mDUc{{X=4yA}}KBebJNw$C}%^spGl>;K9r0iH| z2b~BE)DDFz%jAG5_%q-aIE9|A^$SzR$(CpMub@n{WUHmg5y~_xE&Y)e_M#d?bvh^i? 
ziC7jE%fe#C(n#fwnCw>kzD!V-TiOudt|NOH!L8b+1bvA=;Isc`b5~&tjnf(`EYXsL zz}-YPztwLWt=(5iwD24J*}nomm2bPH%_yk+C7a&hr^HLP&SF@S(l=m8JYQypOc;ke zQG1o7^ps5k|2+OJt#@IzCi_;l>np6jr|8ix$(HC6tBWW-OjTVphu=gviz>%V?Qt`4 zCu{eMDlKHwJp5DmS4D&3Y^V2dAHfTxf5dmimCGid1V$=BvXv4mO0aPz z7M`ifT~q5MHWAz@;#I2B#3XQ+;jYx0HBx+ymd*GT3UNv(q;I3Wn=0>2196w(uGa=N zQ;x~@4^MI@x_4p87ExF2xmK1(ly*1xQTRI3~R?h@CEK`+_zYLL`K6X?QA<` zoH6!8r+ka@dk3Hum27`v^}I1KMH|*p8Da={iuM9J6_c{N72`x8E>HigWVA7SLOa(3*8u{AJ5(gwt?_;5DsZkM{>c{K6&J;7drhYMR`O zqpPJA{ur2qHb+$KtsDqgi@OgFARWOL&=>3y1$rsNMQTqaLj2T684+-qh-XRGLrKXN zf-RzF#cM8>1e`&;t*scW95y)Q((DK@3^Ge|O2RhJ8@B%q zQ(nu~od}jPoQKLtwt{8E(&0)uQx*Jb_%%e%aAjF+Bis(q4XGDa-?0S5vkLk`hPGma za?D^KL2L~C3&ygN%~e(#m8lqHQ`xdSZ-l;L%4lVob2#oan1wVCOB2#!7$TC#DCaG! zk=DU6!oQ1zu}U@5pSU-1Z;O6omAj5-xFMfOw%E_|DleyUsOUFNIqLXufGSR?6r_;W zTx&31c_CYR;qQmTXctAJiOQn)hy63`Lids1A$yk7{+*~4l%1jF*iE5YImy-#tM~D1 zaEj!~%9;Nb)0I$nm_*tvt-ur|@#9)ug?L+d_=<99$V+7nn= zkzd59$k1sOzf=x)>BmvV~RFDlJkP8UobH{Co<`LR^P!DUuf{ceTk&l_c4603oyrSHD&CW6_~y`Z=x+ z{z_Pbc0hZyLh+T&XYpU-dnOC7RZ4SvSTYA>h=%xN$(E3;^;xAX{5YB=2@D`GUO2y2 z+B=uwuEAZ0)ej51AcpRD6SKco1_bOz{tkYEW2BwZ9IH9aTd(4qtMY`ZO1AK-qV5{y z3riw?d8mw*EPh?19CkFo?TI@OtB=Wd5GU5HRhk5hMj8hbVHznjG|ReAhj}sj4Y*6} zotCp+8UFD(1XN>#tR~soi{2ZRcg}&h%W&6Y_2+O1MsqBjpiS7MUr+Bu-vbZHd8_@p znWp|b3|p#mM^Rm}wZZCj_rhTqD0*+@nl%G?AP7>Hi?-X8Q;w~;r*JP|_2+aHCTYpv zD2oi1ztL_(`x^T7?Co!r4Uuze@M|xy25A#kU&znUpAHVzw(U^P8=N05NDdJ{0yS&W zshX0lx3*#zE7>vze+~XNY^snaDdV&Xdz8s?XtUkz>V7!*_3zu*IXEA+;VC3Rpe&3QW`@Pbk~v z|8kcQTMmE1O>$r5=l(%%ruOWla=_sH(1*bKoB&}ii27W&XeWNv4|~V)BO9<=HIQtd zX}_M~%=6#%O9S+2=!>ylqU~8F#6G&bxuyP9eSHF5*`WW6iDwnJX$vVDse8nZvq}ro zkGQ9Be-n;#N_o>&+`n*dXsyn1i(`3+U$-Hfdqc_AO$@uBR5lI49gaIvJ9I(0E1Ra^ zU&enVc3e`*o4ryvbEZf(ON#jCl2Y0nf*+0_nIg(vR;mVsgh=V&KVMZA$tK@aI+x02uGTwS zsV18S;;+Hqq3zC5YRTrK_^D8*2_5E-f*RS`Z^46l_1pm>Rh8%mO81F1Q1ll13Wvm4w9IlUUQ<2B~W5UX#d zub>e7QXL`RR7!;OM(zg#NEw8!M)=Lhv*KF3I#T=Trec##V~~#$zNqEgqQ&4c+|Hh6 z*lwd2Y{JgcgduLCO}(Qulub<#rXeg4Id{3IS%RHC7YwEcIm#- zShnwH9c1gPt|`BNMWB0AQR$&F)-oD@GE768shxPp)w%O4eBWl0&D|{Tsd(@-ykLMR z@L0*Tmq9KMRiPGXb(@JjkCldw_W0fKyJPj8$%iV6hQ$ymR;~I?vl@mp%PNr6Jgex7=BG1HxA=6$>UG4n zBT55N)S-BrS*7dw87S%ZXfgAu5-gVcDLxLPRFrq0N=tiKMqqQUu9{2PowYH~l@wVF zeWp~grT-GxQt#_K{9Jv<^A!RsL?}0zz zx7x^8>=@P=XjyQc_?;G-`?XU1;~M;jKw?YDR<@V5kcBB}FE{OqZA3txu33f95~(l%q0NqKB}rSCaAzVud4 zn-3ux+pw*-5tqExnO3C@`wEmmE8Rv6_EA?l8sc}t?~c{G9mg`_wL&Jn%d3z;h3ev&uUgBRh*TI#5H6J_>iVfuoz?K`Lld+XSbZ5P zRnBYo{nT1=@DX-DeP8ZQtQV{zZ(EvH*UFfh4zU5AqVSW7}5yHBs`{__JxD?oU;%PBm9ch*Ta2?rT-+(?Xli4)~LPsB~<<1(WyO+;!nit%Pd3vCSrwC-B935qy?~r@DA+z6B}Uz_(541 z39({Om?|QVl5!HR!CzFljn!NHjFe`gK)5=@`V#3i7}F)2SGu-2T%993BGS2jLgFu?l+&tzl;AkU8@RfI)~GJr zO~RHg++TOm{wdC$Z+?hSrYmQUuG;#Ns#!L->B^x7VMbT6zm(e8u^fL5{yMBaAWc|I zQ;|?wePPv*_P{m5x4UWu%cyo^@N0xg-FVToTV8$(@+E`TxE!Nv{T}fM_ZX^+;)ypVE#LKH5}K9ii1oR)rk)q0Mv>I)MfUsBo^k z_Pnb4Ku&nWQA1zo0|HfhaJKEi^#xYnuJpeSwAfvkYN+GveUS&i2pC7&q#j~v4Rxt= zKK=&Sg0=&z_q7w%`e@B+s^NyP4~sZV>_>>}$?F|Gxy#3vV$eU!`fl9r&3hE`< zT4D88dyt(W-qu%_CGWY`QiFmceG&0(`$!dI_)K zA9yWBHdjM3k5J`we%rZ^KDK>6UrQhGQM;0_rFQwK<>YHsT71<0&eyX4`l#K_*G`}P zsO9#NGT)E-!J{9=XZeYrKGA>jwdET==Do|;vX*?*Jo@Gh(&$+qHB-KpG3KLY&DU&J zeeQJr^wA8=7ne`}sDmn`P%87A2lUk?x6q%;17JvAURWn`dgRy7YcX#@ z{rYObt<;7F^I(FP`*K?9EBdriuUh>w_(@tw%-}_$46SvVej!o|VH(2X4DCT%wZELO z5#c7nbFAKlJuLbpT!L))3m$0`+N=Ju!?zzl3k+qTd_T!n6I4$+ZY+Qn|_205%Um+L1vYPyIi&>6Z7kZeN-XiK}R4Q1zigkx|D@el0#6+r`s zHP4=EHKY9@+G7|$ki*YFUV9m+weF*?H#mqWzO zn`M}`f3(`zU`|Ikity(!Eo_`>HZ*yGQ0EJ7>%ZU`V4Kr=#>=vcvX7r#lzZ&R;&e${ zlJW19rP&e7m*sw)zC3-?s1+Gy(pIL2)nAocs@G4pdN+?}g{(VatG4oF)}L9YvfA|# 
zLnf$&3g&*h`*p^*Z{Fm7x!`T?=^OuMY#;kB_nRY9#{W9X8Lbx^GKN$&<_@0VkvmsA zIf2c`I+CUT4(I8~gDEAy+t`tL@qNTEYuje4LuC6xv?Xu>E|YV2q*iXOI!dnj4x!8_ z_LEV03;%;k*We!f-;cIvH$e?Hei+?y|M#Pt=$SFMk5}#>U+>&l zZ6CR;;#9598uf{6O-Jhl8u1h3w7TmUh{(SXe8;oQ@w``n)i=(ARJsdKw6E8zid?QV zdO2th-N@^Y)i=%$fBNZZa=!NI708V7!fT^i)jWropx%P1j2B^>@XzC4#{XlyXs!D<@$cf_r~W3jvH1o5JAA_g@qCk7 z&SIUw_)XyM9-DVbr;qRtB4Uf$D>fRj1;MUZ{YiX!MKTDjrN;Xron+y+O`UH30^tnF zcP3~nx2ex%$18*;6D3=ZiFvhyXd*#u+ODoD)>)F$|KJ=ljL-;}0MkgFh1IV_Z@?`G zr%D94wK3nSEe!Sz#J54{ly0Ya!Kz6vB=2!zBpU5)}`q{Ps0cI&>r49jgyONy4R|r&xDD?Q0)`JRHWr zB+{l%5d{ybt*wjjm%=KvucwGP2i3aH9r(LoFWPrlT9E>HD9aR=YJ2-Jq?7O~oG0bt z6w&pNTGM(5{~kO`%h&x<2$E{*59*u1-~E^K`S;@ zeE+@L$C`{^6Y8NgoGL0ER@+6UrC{K5)KyBzJb{|%o*uNl@31eUqsZ*wk zs2|kg=K1)G@RzX2AJoRqHTdgb3)(g;ElAaPs4kBDz^gq6kiLhbgnynY>_^m^=Ck;h z@&A}AIv!DfvEIh_n8u~RG(H$MO_ckQXBv$kkDoA2e1UH%f?oo^6xJ-h`%#T9dis21 z%hNGmv^>50Z7Y4L`fI4Wyi1Sz@FJ}*8b84O;eWk&`J?I*LA@25%EfxdDsFE7WV@^;SI>Uh zP1`JXXV^YVvR%gN-Td@M>+USA-9;({f9Pf|Nt3d8=_X6E?ak7DxuRO-%wq^I5Dc@q zbfpPiQOM{GRcOo(6`*b?Gvz^1%9rx3UgkKM;mWtU=!yBhZdW;(wjqqEqCn5n(d+2w zbwb16>v0)+vQA}$Sgf8fmhy+jmWp9l)$p)ANKu4z^pw;1kHIHGY`&_7W!8vbVlLPK zr6ZZJ4fc?y6B@;|l5iiA@f$}o{|Us#Fi#;of~K*|nF9%NOt=ZppgXy}+E!S5+1QhiPCqb{=?D zV{SCq242;f`U=*8KK%Mn=1`togU-Uhn#_v>d}uABYOTB{P*=p}sDYW8wHY+{9U9bO z$~O3|ZeB`Oy*%qxpXs`wY6E6mgF|o;?m$LE1`gZ?DdP>+o|0FN!xo2{aDiQ~1k9(* z%mgug;d=;f#3t6T5wl*?uhrOhP?W^a8#8AZT!-3C*o0sgL=uZ@%KTtZma@(g%3jz) z&6pt(GGHw1fD+A_n+){4wk_C~;0YwRlwvl+PjaS|ag0JX^lina1WzEiH4_$6DG3{f zU5MQcac$6HAn4C19`;anr?JC8NAN-0g^h1Zd+-=)wqr_fo@58?K$r|yp?)UGk11HO z?b@@s!9mCcM>-Rk!B6lOQajL5aFCk}ZzxA~q%laqzlg1YeT?ncDQ|(lV2gKV6yaM4 z>B6W&qpr*j37OyU(3DEK*bCTQ-FP;iGou;khqte>z0sBK%-;k#p!=PBFf9}8gPJ|r z{LqGBFF|^*yqpeb^C-8)ra@?LjvUx-&;#1Zl2-;F{mAeVoBmQm0tOK0!y5X_Oc_ro zmlbFKQWG-^vT2P$(vOfAsaB>B`!4JS{b+s|?F{AF*h|=Ja2;;L19$>2;Vl^Y^1VRt zg8I_w$9ZapY3f9re8Cfa{+p6~=)aR{?s!skOtrec#} zoCBc$7aZT=N3oviOe(zl_s(NyG)`Hcnsqvvku`TTnE1?92KA>RE6e{ z0ds+?`;5)lAK*`T3FfJsP0{19<)9S|f-G1GyWk{nv72!n8#;~k!d*m$Ua8 z@08IWJ2ty@ z5lSy352sb8kSSNvDl5oIHPR_N$c52S%0*I%iBcYs$qk~E5HtNq5816@l#-9E_8>~B zPVP_dQaX{%dbpGk9%~Pv(3oMQ%zO#k(rO0 z{9aWX_49ob%&@?UAlML$5ZDolF#YHI;qNM_dGtkj(=7NqabqUM9mHe?+7xA5rc^6b zMdANSN|N3r*0Ci@tSOTruc(&WF++cy+hGmTLkc;nn6*`_^m@e{Z4$-LO&t#XSp&yJ zRbSZ1;ZVi?CXTdXXKP1@zN)dYT&lUlESj}-*u?Ngj%cy7iKC5Z)zsnC8#Hwcb%^vs z9Zs?8=L{jj-HkOhPNt6*8_s786(M`lhl_zJ=_5t>#f-T{vx^xc^+(+tGgbXq5632l zx>5`n=7_Uc;vz%Si#fv_84?~Q6|&v*M91axUJm{}L{;)Cuk0osHz`|{_Tu_5hsReT zF@5MX>*u5m*0hOltnQ$)p+>jen(7#Mnc;npw>o(lM@*`|HwV&tfrZxTA1T1+Ou7X>oM8 zqjb?saZ5vU233qR$2WF2h&vQoEa)5e{2(9qv=s#)};fTUb{3}W^2~q$0lRlQoFIdMBC`vk~&mii(YnP zT}-U;nA2=LMh1?#U6N6k4%BL3)JlOqn*(#tXEEDrxy9exT_^P%Zns_zv+$5aRaYn@(V^#=EZg}A0tP}S~Ix>~N<&JRg49;sEXkKhR9-Q7f z(poU_wc8vz!(1ygK5)$26O2u_1dj2yK+EgE5iJk22M3NNSkxWm@Mt&VMgLKb(P57r z#wL%9LsL$OG@~6cb#eyIXYIhFH*hWz16%G0jMyzOmM3+3fjwOdw7j($Lu3w|wI_j= z#(}K{#u@D|4+lpN!TalH;qHzhD$>J?fL*r1{4I>wPDL;t`+i@+`p1y1va zz!>KOCwp|@Vh$5~*inYhfiq$a9EcLQb-D&NTOGL8YXb9IAij)oB>7vWH7VU}Ez_&XJ`FcE>Y(Ni|Nm>>71-`cny>x-J6>xJ_eFeb$B$$O^+ygnb-Zd!?nQjH z29ZZ8I;ck@4|7B>;txj$#~91OG5_80BJD5YFUEWw;OmaRPHb{uyUG8v{ui-+yu%k% znG<8SR~A>sJF+zSn|+w=e@C}v5|Q?{;l4{yVPMcUh> z*%H`hOLP{Ov7E(a^m(MF9>o8hgJ?Fvk?41DpeJmO!1ayf<=r@WU;7!`nt4{%3G7Ez z9E>xR8zejXP?hY_#k>jE6G(@e5NYf%l5HBX zjYDlDDo%71C|`p6C(>RbYBnp)_Su13!st|%Gi0`xO;bB?rfLUn0prZ|;&zX;_e%S< z|4L>TXG^)vwdjPk-+kisBVFKqno(@lOs#R)EO*@-4JVB^j`xrF^+HtgPH9+ z0vFR5{OgT1ou{_hK0WX;$Bo&C9~Ye`IZFE*2X5uYJjadG zl0STFU^ZI=&ue43Yv69~8h8d6J2LLXIy`7ybX9Njl0R1sj;)K*W~N^Lrf;pekjJcu4cP&UH_k3^?&Zv zeeuTan?L+6+ql%bfu~_Eo`#2*@FUKiLwyvHEihiT>@h!3IMg2kos8=ho|HfQBeQlV 
zeGJ?v#;N>zu^&%%WCIR7I>!W2hwe>Bgww~U~~zKyYQKBE(C&Pk*aJIT2yu)HYn z8d}?UgYhwS`WK}=i??W?{XOrUDO&2YdN>y_mohx?e)1RZU~SmO_|MMUQa9c=wsQ!l zwfy0W+27%$#m0;sfj5&YG`bjcugN8zPjR^Xqc}<9lSTzjQVv#nxmkbFylL_<_3=r6 z1#YF~oQxb?@fDUFT=CS=LpW*q=(}CX7kKs!qt9iY2>rFoJe5_Y7Z=EQ_nJ$;`?TLu z?+4nSYpRlk*=;oC55MCy`IRrGJI=w}e;ZAMx&L_kV4&q3UO)Tt!2Owp0jhB8)K{uR zZ{~2aD4PQpz!sm3<5n8LjJs!#dl;(mINhqB*mcF4{Y@lxlJN%1V>;n_osC@{$ z)+Dp-mo$Fkz3WTh2vQrqE~`o`Z|j1i_`)0eG~;s%=Q){k>|i-dut_<|5%gx~Rn^{s zSI4IEsFK;SDIz(>&uQLh@&~Tn3(oQqc37PQ%cfQj9N2W0OZfv0r2-eyxM-O;f_EHk z0p%Sp$tu@LI-Ck2zx-J~1DWXf@FpWFQ#G!7a&vH&z zcMf%FQun}X(H*w2vwTjm8_RyBGf&B-G_0{LjryJJZ2vuQ3mfl5Yj~nm@}#Cw8~@$s zbE5IzcdXCMc{k(V8SYDMGxIKHJV76Fw>9-V47_L=eWT?z##?QqJx%Jf(fCxf%i@ms zNvxjX$P_e;li;@v6IW(9%EgRefKkebv^q!Xx9RB?axF$F3q=9p@RT~qZFAgv(h)?X z-xd_OK)bk)rv*ls7PybQaSduP$QoOXv{O04ZnmjW#kqsl*osGsCo}j8a-DF*XZt#o zlc9E5oPn46)OqC(-_6s`Z`&Q7hP!!}C5?D3968G8WFNaMIU~QWjA!hZmiWNgGhSl8 zZajuUx-- z9Mu8Wy}(f&a4~LzxG>96uvKaHHJulL+q9)&<64|$xpCnAXDRDmF^AUND;=0qA8Zb` zE5CUzdD5A#cwDAyoswp5gx_5AgSwdxSVXtkj`aRU zNo%9FM|Dw0s6VNT)RpRH^|R{GvS>xMAG8UsM^W{(mRbj`yVg%j*2ZY@(R*D>A}4D< zCC$^8Yo((PCSFKfqixl8YWuV;k%zUzk>jJ!YS**{Nq=ikwR)zZrn#ndrW;AR>9Fad z>6+>#xofXKQO`>oQMI zYk%t~>qP5xtJgKp`m=R|^?O&X#AZ>OJo`M2Tjd7TQm008F zVLdisE4Je}F5^$!#eF=&Q@p?zXz7(|7T91%5fn!$e2ec;5tUIDHBcM%&=5_~9R5~3 zv_%JWMi2Bxe+)u0hG8VeU_5@p6imlV%)xx{9kpspu>z~`5K0CniV%dsfpmz5D}&;% zR*0e~N}x2#qC6_$2UJ5%)I&owL36Z1dvroi{D^)S2tS5m1V&>VCSo$CL0~rKWe8HL zEu>f?s?T$@E&Y=64c_A;zCg>!0l)@3!V!T;#2^+4aKk6w&vUHz8{ec`kx`jsvq$DWnZL+9NNTJ*7^HT41gcTM#E%|kU5GBe5aY5fXSGS zSuh5eM=ruL7=y1Q*TUFlBe@kjum`_@?-479$>TVkiR*u!;v%l#I&Q+a`R+$LQ57{|tg9=tzLCcDZ%APrU1PGjT-jQ-wIw^swyrX} z%j_w$H)(9&ha4blKRHa+qsVbEcK(yh$uj*@Wig#JuBRa9V1aC3LK=fFBUj3{wKCVs z+(;VRZYFi?g0am$@*s>IAC-Ar=E>CbE2m|{S()c${ze*uTqLi+*ui!3Cho}P`{ZLh zhjF@Jlkf2fYL?XPEo3l^NraKcIBCdqh{@tlZHQwb2|gGdGm*y5vyeGpY?OyAfWk2P z6eo>7CCP7L+$-hD?@<*sQ5Qy^hGbJ1+qIP0+DPO2x0Ma;Nn=MH$*$-r+xy7uM;g!j z0i++pFbd=F6Q;u0fsi>%<{X*xNMkY!$R#lLohL8KwktCKka=C^pE7UC{7dGYEIj{>9sNziLp+r$UXsQRUz6`;{mEFDHMQMB z2Fp5(jDRtzDAKqOV#qio!3SfkOk_3~{c_37la=dlG~|;F1!R6BvoL8)uqas)-^%v# zGAokCb^o5Mikhg4hA_5kO14B>82jl+8k6Zvb~oCL>))Hgm~|g=0Q?vRV}ntovB4N} z0*pZ?lhZK^MxS{y7sy;BbBWAlGFOnsSS!i3vdzCy7Mo>mm8r|zA#<0^Ju>&n{Dm|I zJxCsvZ6`_Ntehs#<07u$I*bY5l=)XyuD`MI9UAV-#>X@`X6LgmFudLwPjmJ(W8gug(Z}dN3`U<( zx0QKM9B6i(wIyP87J$c>^%Q{av>9$4aUZ~$b9$)#!QRKEH1O8%x_8KsauY$ zh#ycL#x}KO)|FXbWYTAEKs*6JHr^DyKL)8_K|G^WDb()CyfaYAxB`0 zY@ZBH)Z}M^N!5FWj>VoSmsmX`Wr|4 zTsFLv`C8^%neRzsGXIcYWSc2RYMX@&hA~ckOvi4o)&Opxu9WlohjU8W$79nT`?$+ktLv4bV#3arL@7~^l2xitsRf8&ip zr(qZN;UJE}=y+1*X_;q9V@K!7i?Z#C%s)tD(tnbF;cwagQ08NqPf259&&k(0xc=`c zKEW7R&B;3#jGYI|w3EiVFfu~6Mahhj>5>^IGeKsOOpi>T%nUL!`DKwsW;U5QWag5Y zM`k{m1!NYMSyX0mnI&bGmievBa-?xE<;m|6`1-%9Tv=VNtV!0DbwjeLtXq<8VVs?g zGCRw4UCExZt&hxpa@_#(>wylF>qexeUl}DE#*oI$#*shCwy83w%M>za$(%zP+sq>u z$+l%OSIAr`bG6L1GS`zURQ>LH$2Qe}HR|uE=TV=dLZV}$ebITMi$zzCt{>etx<~ZD z=uy$rqZdT4j@}u4DEfT#jp#?w|3q73qGG%;xnoMiRE=pK(

V%!rsNF$-hX#B7f_ z9P@k3y_gpdwZ__Rc=ekm?sUpZA*s4Kzm z%HsOQ^_{D(tCg#dYq)EgYnf}S>$vNR>w)W&D>61yY@yiiW1GbGiX9OAD=(Ie0<&b z*71GghsRHeUlPAD{$Tw1_#5$0<3GlSBsdfN=@arKlu4+T&?uo@!jB1~6J{o?O4y!o zIN?&lorISO=ETTEPh#%G5{W+~Hc9N1*gtW6;_SqAiMta|Ctgjwm-s%>k`$GcF{waO z*`&%zwUU}A^++0(B$AdVZAm(kbSdetq*qB+ceFdJ`x|!!cYSv!_W<_<_gwcn_a662 z_ht7zzx%B_*c0Q)?D@u1+0(?+*)z~H&NI)m%ASPK?p(-4hPaA3NdgY z4hireJu)IQvLZWjA`kMR0KS1*Sk&I^NUHul^$)0u>Zpm@sEhh&h$d);7HEYwXon8y zgkJa&{V_rXoqg-fnN9#eK7!j48=%{!8lCB zBuv3H%)m^{HagG6LM+BI{EXFDhYi?@?bwOk*o*x*fI~Qj6F7xmaS5042d?85ZsTt} z#A7_gbNFBK@EULN0Uz-hicd+_V1^Yogdh}Y5P=xD5Qjv#;Y9{yLKb904&*`}ZnT0|lmPQ)S} ziEzV*49J8m$c7x^xbDd1=cgLU74SWNKsE4_jbwhZk=y`{(E_c(4>XcHp)0zhC-}ie zavu!DU<}1@@WYMd(HM&f;0GMZ(=Y@4fFpT67N+N-FQ(up9LfBIBY7oOV?8!vGqz$U z_Fx}=!66*MF`U3DoWVK#iVL`e%eabbxPcVh!fo8ez4Ug@HN_)55$X;{T%OOAY6k9h zSYd-5{G=s04bmbK{In(6iC9s8ha*!$Zc2XLIXOQ}id)%lJ>p48QrrbIDDI7;Uhi-q z0%?&Bk^1W$Q4jM(^f{CITgeZsiZbmCMm>KRr9XcdeKCugchu3S82xCJ^UA<9n$j_P zM)ayxDed3)*znISv0{pIhi%JB#l0P(+b_;YF>tCgKKM3^_wYb0n(Fk~UanM9-Wok7 z`RS1|)fs8~#G<-NNfGCaCdV{qq$PNj;tms8raAKkM^VNgPPCrpEFYepG9$7g=PHpn z${8)rv5BoP4aJR7y3KV)&Y12j8&!^$^7tM9nlrt#o<}bl3@cWlQDF)k}^_6%yyQK{E5Y> zm=1witV+Ei$+Nj4%V=6DX3cgENZ3fZ8QZZFd$FJ1hsa|h&m8Bdgx@Luz@NB@JGhUB zcq%T=ad!16YZSKzD{KfsD8kn$DQU?_(R!|PScdeJ8Ic*;krTO*7x_^TMMT&NX_0R}S#E5yEjZT!^#MXJvhVgwV`(rSYF_is{Bu69lrYtp|&oO7~_r5ulmn1UIYjrmxJ#aN2v;+Mr7&@Re-_(gm&7Ee%~!Z}fR z35!=Kuj5bAdx^7Q>^;f{c!Fnmfme95R+QN3jGnQSb8T72GeEeOat$LW(;)^fB)}sk zFXe{GMVSW$@C^#1D2lIBQc9EG3ePf*t0s$eMfGLQqQOmRZHcyGI*Xy*D0`tVJyP%N zXUllu7(&yC|7mhBXF?Nbnk))0=ZQOuavp0JkxQ^l%v#P~Hqx|J9AC}>@1b=c4hqW( zo^7Wn&)`>F67^R&O9tPhybE3eS2)`yzNCDOw|I|F_yTo3cfooQev7Afv7ebnBn?iz z_Rr3qYDG7-7wM51S&<#Nksk$72*pqeWl$F1p#mzS3aW{yRnCl7cboM}N(WJV6{l%7 z&GWHX3|Zw|5Vo3fEjD7a$g|oR8{$4l@1raq7u8p@#h;Xa;ctEVYG;r+%QNa%c!v-8 zh|kbAC~h;X2tl|wzTTP5m60+lvLh#QBQFYyuno=_e_6_k_#Qu?I%=aX8lo|pqB&Zk zEjpqrx}!JxpdSXok0BU=F&Kvln2hO|g*ljqMH`g7?qw7!uo7#r9-B8PDLT0WyRZ+x z;2@5Q^BbJGY$Z2x$~PMK)=sBWxHmc*B~+%V8fu{)8lVxHpqV~vqtmXsx>NT=Z}ha2F5o z2+!~Wuka2Z@DZP(ZQ{zox=Be15kt0c%V%RTm)N+4oqa=D6vf43WAQu6N~kQdY~>kH zo3cI{qNzS;t20Io>p?GWWLoDx}d zUd=92{*EiSE_&C9!{&r_J zn};$3y;5Hg7P8isgQh(H({zp|Yhjv-Zw@>)XY62rax7OAt{u*XQPnAHqBiQIFHFPPq!}8EdmRypxl< zo5j62fFr`Si?PmAUc~RXLeJ~upJK=^CiRf=DW2mstKO6Ui08Zb=o7Ss(}6IgL4>Hd z+u1N8fzpEv$cpS@>uwIA5M>dRKq;Z_VV>VpRuy^nI5+30OW6>O(G<1OCLgU@>D5viNjh993qt3PoDU`Qx z8~5-4kLdr5d?BtJ<&3C0pR`5rG3W2DFv>KDKsrPr265ukF=t|kJBQ9qz;XfMInI_f zDfvKg+_^7LQ_7ZTjkf5BuIP^5=!Ze@Gr}-(geZN2`*aTF0xZD_eclOYO;x-)=`36D z0u7gO6*rKATe!{k_s9o$geQ207kH(M604kvV)QA#xXQ4ddlxyx+Ed&l1u2W5gm`W& zmZ$t4RYif*JeTWHHbN6L6GM!qj+C9zeS6?@;c1>SgIP?*aEun7Gc--56!e%wE)c!X zIO~S}Ot}UdY2K<|IpZvX8k^&GXKw8HQ1gz!*%xPnZgUS(roj z1!C5(&U~R8S=HPS`(Ob47={rTgNbyTqPM=}%%u63vivhvVI4MM z8@6LN_F_K{;1G`B1kT_bF5oh*;RbHuE*{_sUf>np-~&EF*~JSkth>NmqFLZBJ8Yqv=^PS7iScZu8zW^}_&h=ZdquV>snVjKxHw z=lC!ri;iOBbq47}*$;#C$Jd>i)B+=^$6*qtV;1IP z34X>}Y{WL~Wc0n{K~eEfXU&M~l(%pj_wYch{gd1D4dn-Xrne^46z)m;9zH4{0_nsL zDI9JhrB@6x%B++*#o81mU68T}ilNjVC8ewgyXkD{tioaq)ImKoKoeFA$4zIvSbEc$ zJE9X!-O!8XzGQ!K=cdzV8BRG;gxz8{6Dg;N;$-)#Oj_~$Mq(c;95QhXL!2=&MAq#TsGd=|c-*<`}Kd@L8)ln0*Q5W^m z7)|Ngl57p$`|mqf7Z^;LjG-8b(HM*In21T3f*F{}=5xvUV(kO2<~GWm`t=9SI5p}3 z^&uR?37os8D*>esUO(p8tXdIz;}^YXX#5h zP~3UUl^sqwnnA{slSJVsyy4HHoQFkNCZ<2(d9;CY3%1d7r?~lq3wV&lqc|xtJayKM zxIlRcS8)Tk#Du3@*C&)O@Jbwi%I)=u@(WDA@OcJ7y62fQms%hlwG#>OAS1FNHwvKe zFG@;rvJA?jGOCLa&l$24Wf$~7FZ?L3KX;DHFqCp6Mq?}{ViKlc2BXa;=ZXn0oH5pQ zEUNtNi65P1^(QZ!gEjwdzNh;H?_uHloe@YvUX(^%bcK#Pc#jyqIhYlNP#rDN8>2A~ zYjFZE5zTkZN}vULVJzn1Bp$6%{BAejV=99t=!211g~PajzwiNJeB&hp%Ap}TVj#v~ 
zI#yx_j^ZkwAc${m#G@0I<0Ky9INuAXz!%T0Hu`zjq4*7NV#rBMwnu@vWU6R!}) zmuCscf&wUwAJ7v0FbRvX6(?{LpAf~DH@Q(3H8BtZtNlFuf^)cyw+P`&gnVd#UYLl5 zSc`o)i$C!Wc0M?JP#PUE7e{anuMo@!(ZXnmQCN*P$iv6BGVs^rp%waIJQiUm%zQ3t zf<@ScW4Mevcndq99}~UU%}LBNk#WKJF{eiz!di z;;4#7Xop@%#uTi?Zk)wkyn~&mR2}rkd~DL!zT?A^zu-aMTTudK@Et0l3TmJZ8lVYU zpba{p3yQqqAN-I%qCW;>C`Mu|CSnR^U^eDsF?zAxa&px{&gMFbP1uH=*oy-=f)hA{ zUvUXnaRaw-7r!vj1M&%8;0-?DI_;lH?GTp`k68{O!;u!zh(+QdKc6=!G9oK-A}=z$ zRjL&vi=YI`;5$@871Tf-G(Z!yKpS*G7xci7=#Rk|ijf$LiI{>JnC<6b7CW0yF2-`K z!a8iiHtb>jPI4~};0R9O41UEWT*VFC!d*PT6THA1e86XDhdCn%K{)(rd5A_V65&Ng zWJONoML`rn2{d}AR4YS%hf1h|8mNN?Xo41KgAV9|9+=B^Ka%}17(+4gFxP)9#Y9ZO z49v!SEXH!I!a8iiHtfV+9KaErz#06CO9*|hRJ%&vz%ATGdfFb4Pw)Z-SuTn7>kLRf*F{N`B;qQScP@igl*V~y*Pj)IH4a8b*+!|pE;(aT*Ni}g$HM4#Xf48IT?MPz0q>0aZ~4jnN7n(F6UEj8P~2O3Flv>6nWpScQ$) zj(s?S)3|^?a0~bG3~%unmXk_KDAFMo9%M!?6hv{9#rLRz`e=r>ejd7@HwI!D#$qyN zVgZ(8Ew*464&XS>;dk7?9X!HIe1Lk2Z#W_iqLF~~$cDTqj8gEI=b;K}qY+x71G=Lx z{1}M|n1(r6jFs2`9eZ&Yr|=uD;wJ9lDc;}{%%_zUJJLes*Z*MUVPd#NfA4d}XwEIX z=bYi@EPjl4)YE*>Im25$raG+dgof#8V5{T&Q?zF3QXORoW2D-8=ZvmXfxlMPN~|pA z>TdkCvOb!=u%t`37=N>Dsj5HxKEn-F%q{AeCI(h?xsAVJHn*aytW|$h)nzkptL2($ z)|b|E9Z2i?F7B}Ttoe%hmbsa=75@&mH~$K^a+)F5eZt?-K>;)Wp zxE}sb=->8^VU--s?e;Lc!|te%#_cGO=AY2kp+N~$abdOX5uv6~ zi+xz=VtZ0(&+zH?)nRATJPdmn_7{)C!>6Sgl4g4N+cdkvnlZ+Ju*cyWLcb4d5#B3w zPFPrY_3(%^y+RwNDHVR*AG#>?ZpZ+8j?msA|M$N=_R;o}VSDU*?El-I(Y`OVOPY%8 zwqaPD-Ir!c=(7-eSR#Kr=VRC(VXZ^g+q>8wg&q&@Vy_!oG_*KJaV9h_tar#C;mz&i z!m@E#xx$x+yB$U8yCThsG{?h(jlnCj^P~FtPOcrQ-}pUDTXa;kH#%o@(dhEgwWFIw zcZ(hrJ&s?~Sjg{T?ub4ZeU{(DychjCT8nYSB*kQnDID`%Os$v}F+BsnhdDoHWz4pi zLow%KuEpGs`G?=bjCQ)6xt%4QRh;#m?VP=xL!1+wbDXQ3TlterzdG+YpF6)eLtRc+ z23KystEj88tDdWktG8>UYpQFxYqRU1>w@dH>z&IM>x#`D`)zEU*tW6#V<*HeiQN)= zIQC-f-PrfBVR4?gJaL*Y#8)P+62FbvG_FHj?>K+ln7FBN^Ws*-ZH(I$cR22B+|{@{ zanIuZiL=H#;$!*U%$)Is;>*NWj;|BnBEDz*(0KpU_(lAJ=01Kw^Je^ucr77qf;S;Q zznxh#p=CnPgpmm|5|$?D2?rC-CEQ4On(!$hO=3b~*2JQT6%y+two2@lI4E&q;@rfa z6SpKD;9tDoPJEMSPKrqKCgn*gnN%&QSyJbufk|VNW+kmk+RkrhUQW83^dZUScDXaU z^SjHrtNY!}-96kx+-ZHWzDe$R{F3HD_iyf7?q_bzlh$MW7JkY z4b9`8%bo|G_nshcq&K}cpSPU1rni;1r+2h>ws)m>yZ5N~iubPfrT4aW*EG^p$<)QP z*z&7pHkUF#vs~7?sVz)BEWJ!2=6gYXZFx=QK5Feu*MbynY4FV84?#0*n}TKsozhxZ z514Y>>L^FF`N3XuS!*qe)Al5&isghky{(S*S4$K1M^&+PvedFwvaYl36z99SV*H=2 z_tfmx%Ia)2R2!}>R5jCn+e~GwwnqJEX%W0sxym!>i0zGKr#4@)nbK;@)oRN1kV)DM z&8?iWl`uKAO{&$FUfZi?)n1r#Yl*fqCWkq{xsK^Ob3rZKq*-sDKH4Vw)zRvF>jT>}Td3`X+Rl1NjW(aM*sR7U{WrEqs}>w?y5z*d8Lcy|wX|13vB6EuZEWY%6lZJMeT6<{ig8OPeYlF2C!CSOO zN(-f(l3DGd^iuls`-dZy@yZnCT+sO7M9W@t=Ag6|r=^N*wz-2^N-b#f1{JiNGY?fK zn%9P`zxjZ5iq&nZ7t|`KNl;eHCG$Y@2eUaSz2#@i!k}Kk1A{6BE0%C&Z_xJO9=2-1 z7fcJyAJjwUZb3UOk;L9NUqLIzt3 znTDDlnNm#SO{2}tY@5tQf(F|v2R*W$2s&nJW=S^PuuL&owZ4`bLA|WIO~Wl2O>Hb| z{O0e1ZUud(jd{=2;85(rPTF*S(c0(I$nP!eP?bn`| zv)a}KJ+R!iObHGNnisMxWMpuxvfDb|`b_H|e9K(MHq+F}R?W23x;&_e`B&Rw+coo2 z(LLg92( z+LAi@o8BI;n(=k5lJ(~{I@;)C;~Fh0x6Ie}aob5fm-e1KaH@ap(2*k>kN;!xpOcj- zqD&vp&*D)ZPZ5!|uct`(Xjy=rV7ka188e;>czuCHgCD%|}&m9#D`M2G&KZMt`W zr<+O3RbNaQ?5QSh5B4OhZ}pabPkBwN-A`XN#4{yGFEPerR&51`RvLPLXxrg>m9d^0 zsun#`EF9;_Z0lWn>=4iJ$PxO9ah_t#0KH{QESp#WSr^AtUVj7SDEPx~= zOP-aj#pXGl-}UixJ+-Xj)?&{id#A}?*L4*~mw0~D=Pva)RjomLeakY>0h8#z($ge- z%)ILJzjo`bh+kKFDv88Zo=sZ6>iW4=o@A3&qo!VMo#(b$+umExzu9v`)g!lh%Bp&! 
zZJrR-US!%I(~Rv~Db4l9x~HfrW^ebD(nM4J>UK}Kswo}By&aw(^=3OgC)CgvwNv}H zSI($<#@(Lfnto=lXRfLh>8*Fz@0oAa+O!t7qn;W1!lR!4s)#@Cd8QR;tB0QO_gb6DqR1J~22Ck09-Q&?(n@p^P0o6DMvLMJC6!W@%499OyzGk#{`yrLgg4yU zPUJo387X$2^K92D)zy2P_jEIBd&Y@d7d`pyugBM&@O19WxuNqq3I3YQQT@RsPb*cg zcG)vr6_>7f=9_=6-nE84@T#Yb8atxeq*~ePXY0LjbfdtDZKL#7it;2VUR9j>!}CI0 zHdNnx&C^KL<8OFAYufmkdhAWlNKIe-m&c}RaTCO<+n(Iol9A%-ZBM+my{&k2+fz?e zyW`oPsMS7S|5bxl4gYT2xx;~udpiBvDQUoj0doeN8_>}IX{gA3*Rw&~y6eenJ~qC@ z1QGeS=Z1ZHt)s(?bJ;~8ubu^&B^gI(2(;o2!(MbnQS--_WMJ*qzP zlP9mL@BHj(rdq31$zJ=!!0EBxPTH^<(?@!Loc`JSL9MBOQoZ-pFmK1^9aFdO&5iJ< z$y;3&c`e>H_HP3>XgkhTbNwfaH=9-LwtEw`9-Z{7cJDh?TnO`)P#fqm;a-zDKG#o^ ze=bgZIhPU~%@ib?6bCB(UO-kkPg{l9i?F5ai}PSvJN z*T+YC+nB{Gr?-pdt|?l$ysyF^_IcXpYsZ_4s21yOq;H7zE>+b&dd+xmQ%!71^fnS{ zlf2DC>VNInT69kG=F;aUd7r5IM2~lasx|B*O8C4^wU4On^VZeZ`MmLFaVnE{o3^iy z7@FCeqTTPKx5(oCVA7fm&>Q9Oj#u>uIlX;NdaJzNLz8 z$)X)-CGM8-di9WRy~otp=k1K$cT&>v{@ONh{AJ77zb{)awy}6rmMPXL=k1^$DCeE2 zrityiqN}x6v3~3NZ5jA()LFf2d9TeJUaLymcRg;7P93Ga-u-*;Fth%qs<(w|Ur|f8 zDy{XJ)x8H*-C5I{RnrPq)yvfOmeBN}b-iWO;C+E}+KHQBS3U1H;!Qp8NBf@IVRceB z?y5hm?=5K)*;{zeX-7thk1f2nL(+|SGP3CC663Or7Y|x`kBCjJy!G_Z*52isx<_nm z>#eR`8z{7P-eX$)cyXeg_n8(}M&H@q>()Yx4&FPw*vNIG`;HACr$6ZEt*7c`J9`Uh zT2{Y4u&dXhY72Vm^+-~5_A9Yw$rU2y^plSi8&|dTv2p(xwaK5S1DD!bB#u| zuGY?8_xnZ<8@rnB<_+qLcf6SK#k+KRF(1GAo4;0}!6h%1+Wu|nviHA>C_ka1TIqRZ zYtd^7@wR4SYVcs_7j?lk`5P-Z#CkM6gycS!~PU%Pk(| z@U2om>C{{+6CEo1PU(ey@Qt-PYSypPVttErE#I|l+3I%N zf7+?3v=rkS)WZseaK;~1KuVlWL`B|pvQR;cH$qWl*qA5`>7m!&bHT_CS*-%zyMVVD()|6RK zW@DKxWVV&rNoKD=CYt)m<-z2?k3aaXDRBJ11}=oCG1ZsWfB8QbW&ZKhb96&qjFEDz z$uegIGSReD=1Q6CWNwzZUFIJ0-*dP*J@ul$Nj>=)Ob2AgqcTs)JTLPS$=i@GzQfY^ z)TxDxHty?0)7wCg@k;FD)bpMsGri0#GIPqzC$o^u;xfy~EHAV2V=-|W_iJYM^Y5vD zy<09*}ud<|&!y#h&Th{=MX8mt^xFGE-#U zk@-O8Q}W-Zqu1xu-H(*xybAP7G})h|UJkF!Ofs{}%#)gxl!CIMsLWC_%gL-Hv#QKm zG8@Tk9>_#fZ!u5!3i!t>E98nbGB?WHCUcj}BQj40GSPHJF5i&LujO*^)6^pgmzhqc zQ)YrpuaSOUSTf0m>@xGnEGV<6%u+JT$*d%^s?1t4>&t8+v!%>-GCRxcK{hd-5+fTkS4t}A3*(?pYXOzKl?z3jK;smM8#r-Xy{uj3p2 z?^I2FN=Aj+Hq@rjR*T<|3IZUx?4l zR&1H)bNH<{QwI$hoBH``kL+|n=24laWL_ZuJ?Ccj)a_&C_Sa;;n=ZjggnOW@JmW zL3?yUSM)${^u+)aqVHgG2!>-6iqQ5GIR(=(6LT;hi%^R7-=V_j7;f}ldv6E{5Jn(jZxVKy1_A`aEbCC9WtCBAfl@&1QC69yWtG*2 zQlMq8vd8_3gwo&h+&}J3o=;xA?>UlXS(atT_F*Ckt;%anbx+nVF zV+&<6F#v057)~68G04V5d_#R5Hee$*VGFimJ9c6>_Uh=rA3rdC0H>HfjdM6e{v)n2 zeFJ~v&OMh~(tRc#;V6wSiEkirhH8Wdj#EF0Q}_jEa1OuXA}-@MZ`{HC2ka4#nD__Jk;DZgqXk-_H3B$qZA(l; zC-gusBv3DeET)SR2M`Bg2u7kRwHm06(d1(=4&yNq8Pq$w_%n$l7gI4EGojaK5$9kY z=3@aC;Y%#RO032@Y{Gix*-qR@+>EW*f!)}L12~MMIDu1eo#xLuT)<`gjz4f6f8jRn z;US)2Cl~OVxSO~aCQeL$ARa&<(}#%1aT2HS3(nvie#J#x#&2l!h}%Dpi5s|s6c*45 zZIFg^bV673KyUOze+JsatDO!-XAf^&q5nH1z($E1J=!7olh8+6!AofBQ=8z8| zj)Y)(0bF13XB>s`n21Tp#Z+vgZ8~uuaVBvV=3pLvrM8IpC6-_rzQRg8p=~vB4K_gL z>~tg1MBGZ;j-A+@Yj8#FWg?1(ABYEV2tT3%wM0}wb)2MDi&z&8&=}2-jFxDPcIbeP z=z{L(g}%tbKn%e!jD+h6_y2KBOu!^e!E}6vIS^QYFR%p5u@Yb78?47hY{pjXz;5ir z0UX9roWLoZ#yMQTW&G~q&mXvszi=D(@DNY%3@`Bp(my;*!2>4vz!!En5R6bnAR2K{ zP#DEf5@k>x6;Te21dcX^2_+TInR-%VAp~TlEW|xSP_LS&dV$GUsCMl!0Mamcp z8|@>WYV$V5=H3mT;>dqX?`+7NcE&~QYE<( zZ-=QTEtE$1-UtzScjA&mpM~j_Ba)AGd!QRrneqU6uzcUFS^A15cjMZ~y~L`!anbTS zapZ1X6GKdp2)-BhNfl4JHj=V=H{@jAQD&A8+F$r@cRcYrXussKNp`1ofV|GQP%_JV z%x|3E`8EwRN%{q?Gv2i~5TD(P8!BHAru%UvjCDgdh)Vb4YRE0MQTOBe%YGZ3)x0}7 zoCV#d`Cb<99>jHI1v);AE8#mnc(OgZpsktr46*oOT#8c-PO<50bir^%{@ZlNbl+r_ z(@lZ)dBXb<>t0tBdlc7B-X|pC+_1(gp{20f(b*Ul;jB?6hTRp zLn5l7A)28D+MpLQF%DBO6LYZzJH$i9FSg|)CZFLotYVSd2iUr!9r4ehPGtunli^_Ka$*~2@)JI1@D_agh+@VkM{ zynYNx^BQ68>2)*u8+%pno%ZAQ%l5|JsZpCfa;WuL zIxZ*vcHEAHthi3 zBSWW$UiM!S`Yim8zd2%KsOvyz3E$WLRei%FTnSxcCdK_4UopOOY)br+xZyEPe0>5c 
z+n;!+`T8alNhoSB8PVH!jo0j`S`ouyhx>*FO!h4r&?@3ws0gYY@K@-3-<}bxqE-hr z4tNyZWr7WeKxw8 zJRP2VU!rZNo8463C(_i)r?~A;_bRrcri!M>@BY#~!!*XH zm}f=LzNXx;t-*)fPPx4fXlLtXbN3i#8*ZxO+1zuw&m^18W06l&s}kAXbB#}qsi(_x ztIuH1Ii^sL1dqhXBf&n7v7U!~gzaMRQqwfgCXwq*VUF9uMIG&;D?3)&G9q6G4~(o4 zH!;%UylnnF@|(yvv3nvToF$x%9o3y@e13_{jLwe!!zazr$8jxYq@%g>dgSxS&9>(; zuMLv%bI(rB6`tRDx*5HUK1Q3di~U!FYpUabZEM_V+aiaE{=>F5dWUIIn0HhLZ?O(G zh8d%bgX~j`0dBI}7`KaVr93Ko)bse$?Vj87_aoZ{x<|PecF%Py>z?e<-lK=dK#$QL zRe67SvU{3)H}@>};qK$z8@O#WJKaofd(FE%lidb7AKJz{ANWXq-#Fg-tP9)ea?Isp zHkTcD9DBojEGwO_9X^2(QDK20mQqniJbyH)7R_1S@=I84%URQp&Oc1amJExp-&xQ5 zrVGw~maCq3Jx5yp3@aM=G%VNh+H}hq;Wyt>Iq;RUaiGau%x|TovY$`LCd&^Nukcfr zXg5{zb8~ka&v1QB+_sqGjP2Z9scT0 zoBPqx9#UAtBCx(Z674ll+5pM76W^p@U z{>zYQykltPHrQa6%#uHc(gw2BSNc@GYe@%=lFQ5O<=%z}`Dep1!*IhILmNYqp_deH=qb-IT<6`vO`ar#%Y8-fCkYdLt!{nf z@$wbJFlnCbDP@UwPZIXK%#yRvdSGl^BZKy6SG@O>^8g^6JEp?D4&}xxdOGWr?Af01+GcD`+wMhEBEBW0o zFJ~ZGi;FJh3`GqCY0`g-OPX9n+AG&)6_!XTh7QzDNP^i9Fm;saOnC|m?apswDn+;` zeN`;d5mwm3JQk^olqmVKjjHhvZx>$xnkY?Y2-$Q)H(B4n|N9&Izv=&E;`gTsC2U#J z98NFK81&y-(j-YlJxi$TeZ~-LIK^e<%BskGmJsf0Ev;h(s!&RlIvG^yDl6dOc2A0y zZ&4q}YK)Wa%JtEt0E9CFL_L zNiDeW{8gC1V{t|Mbm_6YTCO82@^*Qup_Rc~Se_@CLL0D6K0`8ZVLD^z_>Av*x-Me>; zd)4Vp!by?znp6BfZxc4iQ6l_ZLKRn4xrV%EO==*YNkk?_AP2Lt7;B+nKYqd`+{9zN zgLe=g5k)LYpc3jK1?lLGp~%JzEW|2&k3DYuIVP^YODN-$%1MrXo>PC;jahSxDyqdR zSjuxCC|I&NH+NKGWm7z9DrpyOnyh%rmMrpF5Y(26r3R&*b2I4<(p^NJ@JKp4rfITK z(F}e^sGmS%C!bt(O13QRh`W+%aJ(=|8PB*|x~N~nZQ4Ava$ZU`*%A^W<<&)_5XqLR zjWsF$hQJT~`%*uIW3*ih5o^4a(SFa!UqKF)@;pN&n^{!!QC?X>Nh45;5?|nJQ94`N zllDMwN}Rx26h$5)E?AUGmSI%0kweR55p7lKndXu%B>h74w|pTJkN_f@W0u2T9Fo?(*BCYsokXO^fiDX%8# zQED!l`zbTxYq&`n$JsC)DfdJsO#_JfhOLAobk^MMY?SC+>eKN%Zqn~fK|eS8R7NMQ zVu0dn@Cyp(bI$04f#F<1txcd(LN+faKTm!=T$>f7d@1`qr0@?)NAN-D2+3AotKn1@ z%YN<2JK{?$r@l#h7@}m$mLudR;2kO1Lbb7B$`skJSR@~$MsM)E#OBh%BNPtjbI5Ieq9j|A)-hU{AzQkVFT*NoTeUYaic7X0C-;oz!lJpb zXb};i+!9`j;v>eyD@!aTXz7cAw2s$`CFmD9pZpvyQM(hZom99;3-{~1YG4=uTMMmk zAv#)mkgovW!f)HDS&AsvWYa0~*jUN-iFi>|$@FU!%V&y_f>g?FwPD4S-mP*Uk-;#=GE_=a|yB0evv95a<6O(9Lw`j%2sWm6CG z<>VX1iPG$-dr1%C7^Rbt9xhHCPIfXCsH& zVr^-8{bpOvy8OzzY^MH#d%UPAQ7HJ)#zE^g_Fzy&#wdg9!exsu~^_JjNC(l!rY-Nc7 z;$d+mRLoqad?FgxP+I%xo9Q&G){w?@I&~GBYbd=_C_Y9{^Cd-<))8|bUEp_M6u+A5~twzZOJe7_P`xG`!{zl7gu^VHg; zu?$OW3AQH&mXK^YqErVZ-Sj!>a?(}e^A6ld-;-{}4obVkiw+zGkCFaF`ZG~K3XZ`@ zt!28BZH)e~Qoqyo2d0#iY+Fj^_Zv&U9PLFXWvIdLM{37Vp%f2trFbL|W4kJ| z)8=;PXz92?hg+yununm$Jo*s}=}QdL&h}J%Wy^31UttxE-)j$hy+0C?8_KZ5myv8i z+SooEUM2qQs{et7f*L{=`;#6-XOw`X*M*6L%v|R&~969{Mw%$qo6?N}& z`P+Fq*V{*Q9HcC^#88SyrE-$3re+zecp5B8E7?qJGB`h~g&b zgT7kyDCL;JorxD|$>$TU_a~CAmN1P~7CW1frlT{Z9z+%)Ey7^2 zZ>)0OGMv&VET(z2STRniW!gfzgOu-r9jDxO93j0;`VUc`c?lfiVYYJE@!4s*}%xp!Ti!d7`rL{bBz!ccFchPgA~7Q5&736qcQLDA*F&42hC0 zhNxf63IvLMla$l{UQ8lQRnU%ZowctgD@8uusza%-qW-;hJy%ik@2aKJzbgo?_qT=7esxT8!%A& zc7bA)t(&QB#WmXRRn>McRGJ$6UQ=jajhnBUWE(|nE%q%`ZfjnPm7=m`HibL5Pot++ zV~PG47havU!iegUZMN2bnc^**zb5~Q{6cjxakq zcE5%=x=I;tc}MPBlOu9X9!o^?)yjTHG-)-``b2%3>_Vg{^tF=W*Md?i+9I7U9kpp+ z|93NIQQw9=w4K&Eu2n{Sybjl?$JUZ;WyIZYly}bhwKzVLjwb5YVIM|wEX>y2*XbwJ zxzwj)KYf1Iny+V3{~m^~Qje@H*-F&TpLYlI?#BRecLOJ_wJFy}Z@LT;SH4qza*QWk zPWlZ|zfOlSLEE=cS!l3qr?v}a>gXr5JvS?B!@JkvOII+0(paLtf=7_W3J%grZBfn} zoF7g|=FY2=WQUBr!He$<}smS0>5f3n<*iV;W8M@)v%R zg@4Mz$7&sSDz831x<%6-k8Abxvzz{VmG4c3>T@bnU$Qk6#rE;Q)|#|4X*V%u{5{Tj<`H+r2U8 zEn1Ub^uyi~@`rdv&7w6w!z0hX=P%Js*o&L+{Ep}@uAEhZ>>plm*B_;21HWs8$2ldz zluef@be%5Bol{ax3rLrft`u|6DV0ncNj1`K+J$pG$Fb}uk7&y7-c+(x5-)yLs+k&* zHY07W)wrPCmQC%+*OMO<BqC_jB^}5 zUr~QYT^5$hN*R-_8TZ^~`dMIu%iL*9k>n}l9ktJ{D63?@6GkcHcV_EPeFycET7%z} 
zg|g{)^4KI!bG5s9N-fz`KZ$*ne6m*Y52c=LUP%5E`IRJ*dri4$xljHGzRfvP5J#@_ zM#u7OArW0yEic|SQ9Q)!8%j}2q2_E*bf((?t=vtXrZ~ruA11#*)OVuiD8imzPh9;= zDHHTJ<-52~mxsh!G`}8oR_syKQQGRi6`O2&qnDE)BNu7?801;+(rIbinjf}k}TV|vW4IXB2r3`7)w@Rg7E>ck~ygCyLAdJ!K8M zv7c^-T8ITtl%|eLqAOU@QmgcEkwZGZTyE`$LUd~75Bzgk}a92Un?J0uk`zCb>g)$)Y*k{ z4-BNsP@=vj7Mfpb?ccB~StnDQiqo{;Xr+0)Q%Zl_s!wT9QrXW^H#b!0$>w^g+>OY; zNENqawU>E4xkkP{Rdg_@4V}lyPm`Y|>g(dq1yzAr+hp&J1CQP2{2!QHGjA4oX7!YL7u9#P`n46?z18W~*tYDSDA883m2N9q`KZer4aw8VyAt*Fj^aY1 zHH$^{l&yoQeTwC@uWzfxSk*6O$1V!ScI@-*@@I%;hMMB6uUgNlP%4ZPG?!{83ft6N zXEpNrXhtoCs9#0`vz*mV*wlJ*;2!Q0@9$r<^}u5KecMheY-gjI_fRmVNw$DAZMVO= zKsLvvaloZ8DoyL;P^Zi0X%v2g)0p#f8Pm4KbL~r{MGFwo&c1woaDkNmt)M6&*+ z#I_K%lJ$4W*Km_Af4A2Rp==`aKNKo;;5D@lVojL(gCo5I3nd>%)GxCf^Yb7buC6UK zi_%=IrTIIe2hIAcj=c8D`w1keJVqcww2V+i_1gCQnoX$&nM7@7S`Zv?cMXN!w-9>F2)?z(JnXte)a&|a1HYT|5yS}*)2rCYd9 z^FyM(C9Biy&B|CrMMdpxf2Wr_@`ZZv?Z^p8M{SFuuX_xI?i7X*^>wese6_Xqh15!N z^oIlLWZI@;5B-i8^b4Y29kH!2w^AOZYq&}C-yJnW5p^B65`{{gxRp9-+l%VQi_V?6 zGf~*qNo!l2g5wwk-_G1CJLg|=NtW19xR+#CFGHz3D$!h}v(~nxe%EM3VFrbz1^pY- zKS;BbR!4AtPi-rn(eBknn^#6HZ?uMV;Z-%P?IPKJ>Y`mN&t2bqn?jkclC5r6E%y`E zESpoia;%{+xvTiHqMGbjM82GS1ySE1&AFIl;hv~Ivwlx$3og-oy{l%fq}q*v&nRSf z<2i4){QfQJ|3=nIS7qy3cT?O4&+a@4?Jj<)rdBl<>dt+byi|8Bq`JCHc2uH}O<^8U zzr?mIp_le^4b^CfkC&v33p@s_r0r|Gfw6~V3+a*Hw;g?ZiWjxi+2JKAl|gOPqf3*5 zE@^bh6fAC}7GFmda>$2erqLlC-(v?e{L(`^S696!$3Nw$p5xb{kYeUrc zEBz;(C3Y8o*H^Rcy(wqnQ@H4s-BXNipnmC`L%s@YseMP(uUAKA>!X!xsD>IsK3v2e z+I~PpFV13maf;Q8PtB_L;=#nnJrilC2l9b^far zrCO@zW!o?cr)juFRJkniAVvL3>`YbDB3@96?aOoJzWHmrVVnLB9sRgfUaHbpE8kju zB$um6<3c)ZB${cQ!TQa_7x)I>;tBo%|M-%g;kjtvRt<9PW0vCu%YN|iS0OX~WucBPwU4!%1+`OWKi2N_<=aX1n_%zZkHv=t9sheppBB`X zto_*URY7g$7awawdqFL0?8ln9pk}kW^lh2HHGK<;OQwFT1r*daEcjS+ z7S#5x{#c7BsGZvKu@+lU%RBI~rWVxrSIhe!{!3b3DppV|S@>he(gn5Z`oC=W(Qobw z1+}EQA8VBhY8m>whWOF@)hMWCb^2JVTTmOLzue1@-mek02z~R<9R0rbzL-?daUHuz zM}z3lRt=14Nv$>X2h8=-jdS&u;w@@;9%ActvX6Pp(wJ79Jn4hVg?5GZq<5yF-M&S`rUxlq)^l|)( z-;jrUTG!61uk7&5lJcx5g9=%^h)Asb{yr$U4~A)byQoRBwH4Lb5cF7`rA2g8I~lAy zDa7<=W%@H)e{FgXwUcaVO<^{LWyIS0a{M)0FLkEu{GP&dyrR)MAb+>p!-`zjw)Ixm z${`gwT|dcDGm5rYWT5i^UbYyZjqa;9m7Q}a9L6z<7l`@|F`WhM)2{bZYZ>jgsolev zfgFnlO17^DYKa5XH3sK?3Ka(Na5#wLFj1fJ05kq9-V9RTh4-g42n%UmLDUb^HkuD1 zLvs#cD+yaqRTW7?)nGAmh1x?qHB^n09U7f?pz>gjkAr!tNo@M>rVG|84^wx_)*)06 z;ut;h25TN8)WWj)DTT-(yd*h9OB|^VmmRf+u+kLf5cTyt#)UW4j*L?Mjm5sAwhCp2 z@-q5R9t~;NXy1>6(7$o^1RmkBW*Vzz8||6&8;YP$`9IUAywv=u_T_jr**@?Sr@)s{9r}K zlB^x0mu6K+Tb8Br8FDpHEa|IyiL}Y8S~zdzwpUr3Ucb(p^!b|yr>?)vSTXM1gN+BJ zjGRt#R@+5}tf4iH4~9%|%bTFBo6Npq9md6-!f7u0)-Ww%in@jWDN?9DoJY;!JmL_$ ziO*!EtY|V@721Yr>Jr)6m+Bdur^h3rzP-+H-3MqrXQSGRi|&)%7p$`#Cis=UFWm_@p*xQmes)K47kS<#NziXE)@ zFU0Di%1AX`Qx~$Un{QB1M)RMl(b{ics8wY*{h7B2UaWo-&~P+Q60nfw^~9q3g=BSB z1GGDf)v~hn2(_npMZ1?vE51|>k*!fKb~-e2Nw!ult<^G>_q+6>u$aQPF742Ap59mv zQFukcG=~4Pzh52kdWO1MM6Fck+d?R&kKsRsL{7;h{cshaU0$gM$kqwe)?z*F+s9~c zSE)l}>lq4`vAjw&R$*zf;SbQ^-@v z+hmKxjpUulyOa0K7Q^*?Ao)=8VcBB4o@bNikWXg*jcT%aHu*wwzBjPd_iCbL4f%TV z?}+&)W%?FAAVzLd`$X@c_?Sk+`1~vJ-*XB-YOOW?7tmQ2eYU7m&4Cm$$4j<}<28e( zK9wDFDcq;vF(H3$KQ?lVuxwM8m+Htd_X>{!4w`}y4P}C4D?-#yEU)7)1T%{hvD(#b zYO29rkM<^5kMHS!VuEP6Qyt>GNq!rTsXZm?H)jZc;y`$wYl|6Sz z+ce)TXf8=}ZSlncwORNJO0Qv@B-z|2@(Sb? 
zCyVVz)MRHZ@;YcltqGAONY(jML&P8D^_q5+I-oPnT_=kHN7cIK{^Uc+hfNkg99556 zCy+0}a%yWQiv`EHraQ>@knfu;E|Oc0k^e;gGtpCoA6Fy!oJjsFpj$o5kJgXFUgUfq z_4{AZ=eQbI)J-bMJB?~`L|L=u5;I=IO%`&C_+jqj!TEEGACIf5`6Vsy=ppBdm&eue z5#G5xhUW6>98sU+zfU+qbG4=?)KSfA&*W&r*eukl`TcoXZY_yaz{b!zsI4yQ?)+|-)BXx>Dq6< zF+<>o)f`W!&>0-DW=OVXGqi?(s8-pPPGJg#dBnjif~WskJRi=g&cf`djEG={%Rxz| z*A+y4nnPYzTtPfQy+}c>$DvM!F$-aQ4(R=K`aC*)o;&&Vm@K`s&Oek9jEH)NR!o0b z*Z}e3ni?81ol*o%I=#zRTp9 z)fu@3e?qFk&4l)o__GGj5mJ+J6InzI)mn_ViHk_A%{76(`Sne?nd!7T3=f0) zbs4=2eW?f5%fA8b4bewcVU zm0hfNYiBzzmOMaGE zi+GRNp;P_^9wHX+%*}%h@b4l;<)KMe?n_vQWae?*Vd5MUo4Rpry0gikAJ&!@$zcXtQL`7rt5F+FJcqR2`F+x<&1AYAu`L4naQGp1MGvHbH#B6q%Thv? zzE5v}URVVE5ShsgyeUC{aVYCP)8$3`zttkVp^3)=O8TMlJj*E4mm?E2=!f-v)P7=m z2Jsy667p~zw{Ra%@B(jP>?cKe!2*3AJ241h;5&+=5>NysQLZ2RY9bTWP#X=<6e-YW z*0p20C)P8aRS@42Kgnb~RJ_3n+8SgrZX7N!-K0OGu)#5a@rTKChz-{AbhePYES3^0o>L%B7WvDv3m)ZgegjB(>|fLgcV`~zMBa0hCEz`Q!zwO9p2+P# za3aT3MCLFyBzj>CR%16#;Ws>j&m # else # include diff --git a/deps/icu-small/source/i18n/scriptset.cpp b/deps/icu-small/source/i18n/scriptset.cpp index ab7cb1e68ec449..951fe440803573 100644 --- a/deps/icu-small/source/i18n/scriptset.cpp +++ b/deps/icu-small/source/i18n/scriptset.cpp @@ -258,7 +258,7 @@ void ScriptSet::setScriptExtensions(UChar32 codePoint, UErrorCode& status) { while (TRUE) { script_count = uscript_getScriptExtensions( - codePoint, scripts.getAlias(), FIRST_GUESS_SCRIPT_CAPACITY, &internalStatus); + codePoint, scripts.getAlias(), scripts.getCapacity(), &internalStatus); if (internalStatus == U_BUFFER_OVERFLOW_ERROR) { // Need to allocate more space if (scripts.resize(script_count) == NULL) { diff --git a/deps/icu-small/source/i18n/uspoof.cpp b/deps/icu-small/source/i18n/uspoof.cpp index 3be10862fccdd9..d81b5b2149a206 100644 --- a/deps/icu-small/source/i18n/uspoof.cpp +++ b/deps/icu-small/source/i18n/uspoof.cpp @@ -646,13 +646,6 @@ uspoof_getSkeletonUnicodeString(const USpoofChecker *sc, return dest; } - // Check that at least one of the CONFUSABLE flags is turned on. If not, - // return an error. - if ((This->fChecks & USPOOF_CONFUSABLE) == 0) { - *status = U_ILLEGAL_ARGUMENT_ERROR; - return dest; - } - UnicodeString nfdId; gNfdNormalizer->normalize(id, nfdId, *status); From 785975d922f2e9a27b5615537485f9cab23dbccf Mon Sep 17 00:00:00 2001 From: "Steven R. Loomis" Date: Fri, 9 Dec 2016 12:49:45 -0800 Subject: [PATCH 112/144] deps: ICU 58.2 bump download URL Bump configure download path for ICU from 58.1 to 58.2 * This commit just changes the download URL. 
PR-URL: https://github.com/nodejs/node/pull/10206 Reviewed-By: Michael Dawson Reviewed-By: Jeremiah Senkpiel Reviewed-By: James M Snell --- configure | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/configure b/configure index 92cf4c821709f7..d926cec7d5d7ea 100755 --- a/configure +++ b/configure @@ -1001,8 +1001,8 @@ def glob_to_var(dir_base, dir_sub, patch_dir): def configure_intl(o): icus = [ { - 'url': 'https://ssl.icu-project.org/files/icu4c/58.1/icu4c-58_1-src.zip', - 'md5': 'd6476b355f66a11fbe12db1d61d90d69', + 'url': 'https://ssl.icu-project.org/files/icu4c/58.2/icu4c-58_2-src.zip', + 'md5': 'f4fca37508fc5d14390501cf17aef084', }, ] def icu_download(path): From 0b2bc5e27b746d816f98154ae85936e544371043 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 23 Dec 2016 02:58:45 +0800 Subject: [PATCH 113/144] benchmark: add benchmark for WHATWG URL properties PR-URL: https://github.com/nodejs/node/pull/10408 Fixes: https://github.com/nodejs/node/issues/10376 Reviewed-By: James M Snell --- benchmark/url/whatwg-url-properties.js | 91 ++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 benchmark/url/whatwg-url-properties.js diff --git a/benchmark/url/whatwg-url-properties.js b/benchmark/url/whatwg-url-properties.js new file mode 100644 index 00000000000000..a3c4d886bd3c90 --- /dev/null +++ b/benchmark/url/whatwg-url-properties.js @@ -0,0 +1,91 @@ +'use strict'; + +var common = require('../common.js'); +var URL = require('url').URL; + +var bench = common.createBenchmark(main, { + url: [ + 'http://example.com/', + 'https://encrypted.google.com/search?q=url&q=site:npmjs.org&hl=en', + 'javascript:alert("node is awesome");', + 'http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test' + ], + prop: ['toString', 'href', 'origin', 'protocol', + 'username', 'password', 'host', 'hostname', 'port', + 'pathname', 'search', 'searchParams', 'hash'], + n: [1e4] +}); + +function setAndGet(n, url, prop, alternative) { + const old = url[prop]; + bench.start(); + for (var i = 0; i < n; i += 1) { + url[prop] = n % 2 === 0 ? alternative : old; // set + url[prop]; // get + } + bench.end(n); +} + +function get(n, url, prop) { + bench.start(); + for (var i = 0; i < n; i += 1) { + url[prop]; // get + } + bench.end(n); +} + +function stringify(n, url, prop) { + bench.start(); + for (var i = 0; i < n; i += 1) { + url.toString(); + } + bench.end(n); +} + +const alternatives = { + href: 'http://user:pass@foo.bar.com:21/aaa/zzz?l=25#test', + protocol: 'https:', + username: 'user2', + password: 'pass2', + host: 'foo.bar.net:22', + hostname: 'foo.bar.org', + port: '23', + pathname: '/aaa/bbb', + search: '?k=99', + hash: '#abcd' +}; + +function getAlternative(prop) { + return alternatives[prop]; +} + +function main(conf) { + const n = conf.n | 0; + const url = new URL(conf.url); + const prop = conf.prop; + + switch (prop) { + case 'protocol': + case 'username': + case 'password': + case 'host': + case 'hostname': + case 'port': + case 'pathname': + case 'search': + case 'hash': + setAndGet(n, url, prop, getAlternative(prop)); + break; + // TODO: move href to the first group when the setter lands. 
+ case 'href': + case 'origin': + case 'searchParams': + get(n, url, prop); + break; + case 'toString': + stringify(n, url); + break; + default: + throw new Error('Unknown prop'); + } +} From 7fbd12f876efa35478362bfe169867a5a2dbb3a4 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Thu, 22 Dec 2016 17:16:08 +0800 Subject: [PATCH 114/144] url: make WHATWG URL properties spec compliant * Set exposed attributes of the interface enumerable and configurable, as required by the spec. See: https://heycam.github.io/webidl/#es-attributes * Make sure `URL#searchParams` returns `[[SameObject]]` * Add the missing `URL#href` setter * Reorder the properties to match https://url.spec.whatwg.org/#api * Add tests for the ECMAScript property attributes PR-URL: https://github.com/nodejs/node/pull/10408 Fixes: https://github.com/nodejs/node/issues/10376 Reviewed-By: James M Snell --- lib/internal/url.js | 716 +++++++++++--------- test/parallel/test-whatwg-url-properties.js | 127 ++++ 2 files changed, 513 insertions(+), 330 deletions(-) create mode 100644 test/parallel/test-whatwg-url-properties.js diff --git a/lib/internal/url.js b/lib/internal/url.js index ba54aa8c233354..9d7448be8f6a20 100644 --- a/lib/internal/url.js +++ b/lib/internal/url.js @@ -85,34 +85,39 @@ class TupleOrigin { } } +// Reused by URL constructor and URL#href setter. +function parse(url, input, base) { + input = String(input); + const base_context = base ? base[context] : undefined; + url[context] = new StorageObject(); + binding.parse(input.trim(), -1, base_context, undefined, + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + throw new TypeError('Invalid URL'); + url[context].flags = flags; + url[context].scheme = protocol; + url[context].username = username; + url[context].password = password; + url[context].port = port; + url[context].path = path; + url[context].query = query; + url[context].fragment = fragment; + url[context].host = host; + if (url[searchParams]) { // invoked from href setter + initSearchParams(url[searchParams], query); + } else { + url[searchParams] = new URLSearchParams(query); + } + url[searchParams][context] = url; + }); +} + class URL { constructor(input, base) { if (base !== undefined && !(base instanceof URL)) base = new URL(String(base)); - input = String(input); - const base_context = base ? 
base[context] : undefined; - this[context] = new StorageObject(); - binding.parse(input.trim(), -1, base_context, undefined, - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - throw new TypeError('Invalid URL'); - this[context].flags = flags; - this[context].scheme = protocol; - this[context].username = username; - this[context].password = password; - this[context].port = port; - this[context].path = path; - this[context].query = query; - this[context].fragment = fragment; - this[context].host = host; - this[searchParams] = new URLSearchParams(query); - this[searchParams][context] = this; - }); - } - - get origin() { - return URL.originFor(this).toString(true); + parse(this, input, base); } get [special]() { @@ -123,309 +128,6 @@ class URL { return (this[context].flags & binding.URL_FLAGS_CANNOT_BE_BASE) != 0; } - get protocol() { - return this[context].scheme; - } - - get searchParams() { - return this[searchParams]; - } - - set protocol(scheme) { - scheme = String(scheme); - if (scheme.length === 0) - return; - binding.parse(scheme, - binding.kSchemeStart, - null, - this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - const newIsSpecial = (flags & binding.URL_FLAGS_SPECIAL) != 0; - if ((this[special] && !newIsSpecial) || - (!this[special] && newIsSpecial) || - (newIsSpecial && !this[special] && - this[context].host === undefined)) { - return; - } - if (newIsSpecial) { - this[context].flags |= binding.URL_FLAGS_SPECIAL; - } else { - this[context].flags &= ~binding.URL_FLAGS_SPECIAL; - } - if (protocol) { - this[context].scheme = protocol; - this[context].flags |= binding.URL_FLAGS_HAS_SCHEME; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_SCHEME; - } - }); - } - - get username() { - return this[context].username || ''; - } - - set username(username) { - username = String(username); - if (!this.hostname) - return; - if (!username) { - this[context].username = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_USERNAME; - return; - } - this[context].username = binding.encodeAuth(username); - this[context].flags |= binding.URL_FLAGS_HAS_USERNAME; - } - - get password() { - return this[context].password || ''; - } - - set password(password) { - password = String(password); - if (!this.hostname) - return; - if (!password) { - this[context].password = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_PASSWORD; - return; - } - this[context].password = binding.encodeAuth(password); - this[context].flags |= binding.URL_FLAGS_HAS_PASSWORD; - } - - get host() { - var ret = this[context].host || ''; - if (this[context].port !== undefined) - ret += `:${this[context].port}`; - return ret; - } - - set host(host) { - host = String(host); - if (this[cannotBeBase] || - (this[special] && host.length === 0)) { - // Cannot set the host if cannot-be-base is set or - // scheme is special and host length is zero - return; - } - if (!host) { - this[context].host = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; - return; - } - binding.parse(host, binding.kHost, null, this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - if (host) { - this[context].host = host; - this[context].flags |= binding.URL_FLAGS_HAS_HOST; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; - } - if (port !== undefined) - 
this[context].port = port; - }); - } - - get hostname() { - return this[context].host || ''; - } - - set hostname(host) { - host = String(host); - if (this[cannotBeBase] || - (this[special] && host.length === 0)) { - // Cannot set the host if cannot-be-base is set or - // scheme is special and host length is zero - return; - } - if (!host) { - this[context].host = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; - return; - } - binding.parse(host, - binding.kHostname, - null, - this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - if (host) { - this[context].host = host; - this[context].flags |= binding.URL_FLAGS_HAS_HOST; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; - } - }); - } - - get port() { - const port = this[context].port; - return port === undefined ? '' : String(port); - } - - set port(port) { - if (!this[context].host || this[cannotBeBase] || this.protocol === 'file:') - return; - port = String(port); - if (port === '') { - // Currently, if port number is empty, left unchanged. - // TODO(jasnell): This might be changing in the spec - return; - } - binding.parse(port, binding.kPort, null, this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - this[context].port = port; - }); - } - - get pathname() { - if (this[cannotBeBase]) - return this[context].path[0]; - return this[context].path !== undefined ? - `/${this[context].path.join('/')}` : ''; - } - - set pathname(path) { - if (this[cannotBeBase]) - return; - path = String(path); - binding.parse(path, - binding.kPathStart, - null, - this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - if (path) { - this[context].path = path; - this[context].flags |= binding.URL_FLAGS_HAS_PATH; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_PATH; - } - }); - } - - get search() { - return !this[context].query ? '' : `?${this[context].query}`; - } - - set search(search) { - search = String(search); - if (search[0] === '?') search = search.slice(1); - if (!search) { - this[context].query = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_QUERY; - this[searchParams][searchParams] = {}; - return; - } - this[context].query = ''; - binding.parse(search, - binding.kQuery, - null, - this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - if (query) { - this[context].query = query; - this[context].flags |= binding.URL_FLAGS_HAS_QUERY; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_QUERY; - } - }); - this[searchParams][searchParams] = querystring.parse(search); - } - - get hash() { - return !this[context].fragment ? 
'' : `#${this[context].fragment}`; - } - - set hash(hash) { - hash = String(hash); - if (this.protocol === 'javascript:') - return; - if (!hash) { - this[context].fragment = null; - this[context].flags &= ~binding.URL_FLAGS_HAS_FRAGMENT; - return; - } - if (hash[0] === '#') hash = hash.slice(1); - this[context].fragment = ''; - binding.parse(hash, - binding.kFragment, - null, - this[context], - (flags, protocol, username, password, - host, port, path, query, fragment) => { - if (flags & binding.URL_FLAGS_FAILED) - return; - if (fragment) { - this[context].fragment = fragment; - this[context].flags |= binding.URL_FLAGS_HAS_FRAGMENT; - } else { - this[context].flags &= ~binding.URL_FLAGS_HAS_FRAGMENT; - } - }); - } - - get href() { - return this.toString(); - } - - toString(options) { - options = options || {}; - const fragment = - options.fragment !== undefined ? - !!options.fragment : true; - const unicode = !!options.unicode; - var ret; - if (this.protocol) - ret = this.protocol; - if (this[context].host !== undefined) { - ret += '//'; - const has_username = typeof this[context].username === 'string'; - const has_password = typeof this[context].password === 'string'; - if (has_username || has_password) { - if (has_username) - ret += this[context].username; - if (has_password) - ret += `:${this[context].password}`; - ret += '@'; - } - if (unicode) { - ret += punycode.toUnicode(this.hostname); - if (this.port !== undefined) - ret += `:${this.port}`; - } else { - ret += this.host; - } - } else if (this[context].scheme === 'file:') { - ret += '//'; - } - if (this.pathname) - ret += this.pathname; - if (typeof this[context].query === 'string') - ret += `?${this[context].query}`; - if (fragment & typeof this[context].fragment === 'string') - ret += `#${this[context].fragment}`; - return ret; - } - inspect(depth, opts) { var ret = 'URL {\n'; ret += ` href: ${this.href}\n`; @@ -456,6 +158,353 @@ class URL { } } +Object.defineProperties(URL.prototype, { + toString: { + // https://heycam.github.io/webidl/#es-stringifier + writable: true, + enumerable: true, + configurable: true, + // eslint-disable-next-line func-name-matching + value: function toString(options) { + options = options || {}; + const fragment = + options.fragment !== undefined ? 
+ !!options.fragment : true; + const unicode = !!options.unicode; + var ret; + if (this.protocol) + ret = this.protocol; + if (this[context].host !== undefined) { + ret += '//'; + const has_username = typeof this[context].username === 'string'; + const has_password = typeof this[context].password === 'string'; + if (has_username || has_password) { + if (has_username) + ret += this[context].username; + if (has_password) + ret += `:${this[context].password}`; + ret += '@'; + } + if (unicode) { + ret += punycode.toUnicode(this.hostname); + if (this.port !== undefined) + ret += `:${this.port}`; + } else { + ret += this.host; + } + } else if (this[context].scheme === 'file:') { + ret += '//'; + } + if (this.pathname) + ret += this.pathname; + if (typeof this[context].query === 'string') + ret += `?${this[context].query}`; + if (fragment & typeof this[context].fragment === 'string') + ret += `#${this[context].fragment}`; + return ret; + } + }, + href: { + enumerable: true, + configurable: true, + get() { + return this.toString(); + }, + set(input) { + parse(this, input); + } + }, + origin: { // readonly + enumerable: true, + configurable: true, + get() { + return originFor(this).toString(true); + } + }, + protocol: { + enumerable: true, + configurable: true, + get() { + return this[context].scheme; + }, + set(scheme) { + scheme = String(scheme); + if (scheme.length === 0) + return; + binding.parse(scheme, + binding.kSchemeStart, + null, + this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + const newIsSpecial = (flags & binding.URL_FLAGS_SPECIAL) != 0; + if ((this[special] && !newIsSpecial) || + (!this[special] && newIsSpecial) || + (newIsSpecial && !this[special] && + this[context].host === undefined)) { + return; + } + if (newIsSpecial) { + this[context].flags |= binding.URL_FLAGS_SPECIAL; + } else { + this[context].flags &= ~binding.URL_FLAGS_SPECIAL; + } + if (protocol) { + this[context].scheme = protocol; + this[context].flags |= binding.URL_FLAGS_HAS_SCHEME; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_SCHEME; + } + }); + } + }, + username: { + enumerable: true, + configurable: true, + get() { + return this[context].username || ''; + }, + set(username) { + username = String(username); + if (!this.hostname) + return; + if (!username) { + this[context].username = null; + this[context].flags &= ~binding.URL_FLAGS_HAS_USERNAME; + return; + } + this[context].username = binding.encodeAuth(username); + this[context].flags |= binding.URL_FLAGS_HAS_USERNAME; + } + }, + password: { + enumerable: true, + configurable: true, + get() { + return this[context].password || ''; + }, + set(password) { + password = String(password); + if (!this.hostname) + return; + if (!password) { + this[context].password = null; + this[context].flags &= ~binding.URL_FLAGS_HAS_PASSWORD; + return; + } + this[context].password = binding.encodeAuth(password); + this[context].flags |= binding.URL_FLAGS_HAS_PASSWORD; + } + }, + host: { + enumerable: true, + configurable: true, + get() { + var ret = this[context].host || ''; + if (this[context].port !== undefined) + ret += `:${this[context].port}`; + return ret; + }, + set(host) { + host = String(host); + if (this[cannotBeBase] || + (this[special] && host.length === 0)) { + // Cannot set the host if cannot-be-base is set or + // scheme is special and host length is zero + return; + } + if (!host) { + this[context].host = null; + this[context].flags &= 
~binding.URL_FLAGS_HAS_HOST; + return; + } + binding.parse(host, binding.kHost, null, this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + if (host) { + this[context].host = host; + this[context].flags |= binding.URL_FLAGS_HAS_HOST; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; + } + if (port !== undefined) + this[context].port = port; + }); + } + }, + hostname: { + enumerable: true, + configurable: true, + get() { + return this[context].host || ''; + }, + set(host) { + host = String(host); + if (this[cannotBeBase] || + (this[special] && host.length === 0)) { + // Cannot set the host if cannot-be-base is set or + // scheme is special and host length is zero + return; + } + if (!host) { + this[context].host = null; + this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; + return; + } + binding.parse(host, + binding.kHostname, + null, + this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + if (host) { + this[context].host = host; + this[context].flags |= binding.URL_FLAGS_HAS_HOST; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_HOST; + } + }); + } + }, + port: { + enumerable: true, + configurable: true, + get() { + const port = this[context].port; + return port === undefined ? '' : String(port); + }, + set(port) { + if (!this[context].host || this[cannotBeBase] || + this.protocol === 'file:') + return; + port = String(port); + if (port === '') { + // Currently, if port number is empty, left unchanged. + // TODO(jasnell): This might be changing in the spec + return; + } + binding.parse(port, binding.kPort, null, this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + this[context].port = port; + }); + } + }, + pathname: { + enumerable: true, + configurable: true, + get() { + if (this[cannotBeBase]) + return this[context].path[0]; + return this[context].path !== undefined ? + `/${this[context].path.join('/')}` : ''; + }, + set(path) { + if (this[cannotBeBase]) + return; + path = String(path); + binding.parse(path, + binding.kPathStart, + null, + this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + if (path) { + this[context].path = path; + this[context].flags |= binding.URL_FLAGS_HAS_PATH; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_PATH; + } + }); + } + }, + search: { + enumerable: true, + configurable: true, + get() { + return !this[context].query ? 
'' : `?${this[context].query}`; + }, + set(search) { + search = String(search); + if (search[0] === '?') search = search.slice(1); + if (!search) { + this[context].query = null; + this[context].flags &= ~binding.URL_FLAGS_HAS_QUERY; + this[searchParams][searchParams] = {}; + return; + } + this[context].query = ''; + binding.parse(search, + binding.kQuery, + null, + this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + if (query) { + this[context].query = query; + this[context].flags |= binding.URL_FLAGS_HAS_QUERY; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_QUERY; + } + }); + this[searchParams][searchParams] = querystring.parse(search); + } + }, + searchParams: { // readonly + enumerable: true, + configurable: true, + get() { + return this[searchParams]; + } + }, + hash: { + enumerable: true, + configurable: true, + get() { + return !this[context].fragment ? '' : `#${this[context].fragment}`; + }, + set(hash) { + hash = String(hash); + if (this.protocol === 'javascript:') + return; + if (!hash) { + this[context].fragment = null; + this[context].flags &= ~binding.URL_FLAGS_HAS_FRAGMENT; + return; + } + if (hash[0] === '#') hash = hash.slice(1); + this[context].fragment = ''; + binding.parse(hash, + binding.kFragment, + null, + this[context], + (flags, protocol, username, password, + host, port, path, query, fragment) => { + if (flags & binding.URL_FLAGS_FAILED) + return; + if (fragment) { + this[context].fragment = fragment; + this[context].flags |= binding.URL_FLAGS_HAS_FRAGMENT; + } else { + this[context].flags &= ~binding.URL_FLAGS_HAS_FRAGMENT; + } + }); + } + } +}); + var hexTable = new Array(256); for (var i = 0; i < 256; ++i) hexTable[i] = '%' + ((i < 16 ? '0' : '') + i.toString(16)).toUpperCase(); @@ -546,6 +595,12 @@ function getSearchParamPairs(target) { return values; } +// Reused by the URL parse function invoked by +// the href setter, and the URLSearchParams constructor +function initSearchParams(url, init) { + url[searchParams] = querystring.parse(init); +} + class URLSearchParams { constructor(init = '') { if (init instanceof URLSearchParams) { @@ -554,7 +609,7 @@ class URLSearchParams { } else { init = String(init); if (init[0] === '?') init = init.slice(1); - this[searchParams] = querystring.parse(init); + initSearchParams(this, init); } // "associated url object" @@ -790,7 +845,7 @@ Object.defineProperty(URLSearchParamsIteratorPrototype, Symbol.toStringTag, { configurable: true }); -URL.originFor = function(url, base) { +function originFor(url, base) { if (!(url instanceof URL)) url = new URL(url, base); var origin; @@ -822,8 +877,9 @@ URL.originFor = function(url, base) { origin = new OpaqueOrigin(); } return origin; -}; +} +URL.originFor = originFor; URL.domainToASCII = function(domain) { return binding.domainToASCII(String(domain)); }; diff --git a/test/parallel/test-whatwg-url-properties.js b/test/parallel/test-whatwg-url-properties.js new file mode 100644 index 00000000000000..60cf581ad8da4d --- /dev/null +++ b/test/parallel/test-whatwg-url-properties.js @@ -0,0 +1,127 @@ +'use strict'; + +require('../common'); + +const URL = require('url').URL; +const assert = require('assert'); + +const url = new URL('http://user:pass@foo.bar.com:21/aaa/zzz?l=24#test'); +const oldParams = url.searchParams; // for test of [SameObject] + +// To retrieve enumerable but not necessarily own properties, +// we need to use the for-in loop. 
+const props = []; +for (const prop in url) { + props.push(prop); +} + +// See: https://url.spec.whatwg.org/#api +// https://heycam.github.io/webidl/#es-attributes +// https://heycam.github.io/webidl/#es-stringifier +const expected = ['toString', + 'href', 'origin', 'protocol', + 'username', 'password', 'host', 'hostname', 'port', + 'pathname', 'search', 'searchParams', 'hash']; + +assert.deepStrictEqual(props, expected); + +// href is writable (not readonly) and is stringifier +assert.strictEqual(url.toString(), url.href); +url.href = 'http://user:pass@foo.bar.com:21/aaa/zzz?l=25#test'; +assert.strictEqual(url.href, + 'http://user:pass@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual(url.toString(), url.href); +// Return true because it's configurable, but because the properties +// are defined on the prototype per the spec, the deletion has no effect +assert.strictEqual((delete url.href), true); +assert.strictEqual(url.href, + 'http://user:pass@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual(url.searchParams, oldParams); // [SameObject] + +// searchParams is readonly. Under strict mode setting a +// non-writable property should throw. +// Note: this error message is subject to change in V8 updates +assert.throws(() => url.origin = 'http://foo.bar.com:22', + new RegExp('TypeError: Cannot set property origin of' + + ' \\[object Object\\] which has only a getter')); +assert.strictEqual(url.origin, 'http://foo.bar.com:21'); +assert.strictEqual(url.toString(), + 'http://user:pass@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.origin), true); +assert.strictEqual(url.origin, 'http://foo.bar.com:21'); + +// The following properties should be writable (not readonly) +url.protocol = 'https:'; +assert.strictEqual(url.protocol, 'https:'); +assert.strictEqual(url.toString(), + 'https://user:pass@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.protocol), true); +assert.strictEqual(url.protocol, 'https:'); + +url.username = 'user2'; +assert.strictEqual(url.username, 'user2'); +assert.strictEqual(url.toString(), + 'https://user2:pass@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.username), true); +assert.strictEqual(url.username, 'user2'); + +url.password = 'pass2'; +assert.strictEqual(url.password, 'pass2'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.com:21/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.password), true); +assert.strictEqual(url.password, 'pass2'); + +url.host = 'foo.bar.net:22'; +assert.strictEqual(url.host, 'foo.bar.net:22'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.net:22/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.host), true); +assert.strictEqual(url.host, 'foo.bar.net:22'); + +url.hostname = 'foo.bar.org'; +assert.strictEqual(url.hostname, 'foo.bar.org'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:22/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.hostname), true); +assert.strictEqual(url.hostname, 'foo.bar.org'); + +url.port = '23'; +assert.strictEqual(url.port, '23'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:23/aaa/zzz?l=25#test'); +assert.strictEqual((delete url.port), true); +assert.strictEqual(url.port, '23'); + +url.pathname = '/aaa/bbb'; +assert.strictEqual(url.pathname, '/aaa/bbb'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:23/aaa/bbb?l=25#test'); +assert.strictEqual((delete url.pathname), true); +assert.strictEqual(url.pathname, '/aaa/bbb'); 
+ +url.search = '?k=99'; +assert.strictEqual(url.search, '?k=99'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:23/aaa/bbb?k=99#test'); +assert.strictEqual((delete url.search), true); +assert.strictEqual(url.search, '?k=99'); + +url.hash = '#abcd'; +assert.strictEqual(url.hash, '#abcd'); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:23/aaa/bbb?k=99#abcd'); +assert.strictEqual((delete url.hash), true); +assert.strictEqual(url.hash, '#abcd'); + +// searchParams is readonly. Under strict mode setting a +// non-writable property should throw. +// Note: this error message is subject to change in V8 updates +assert.throws(() => url.searchParams = '?k=88', + new RegExp('TypeError: Cannot set property searchParams of' + + ' \\[object Object\\] which has only a getter')); +assert.strictEqual(url.searchParams, oldParams); +assert.strictEqual(url.toString(), + 'https://user2:pass2@foo.bar.org:23/aaa/bbb?k=99#abcd'); +assert.strictEqual((delete url.searchParams), true); +assert.strictEqual(url.searchParams, oldParams); From 78e8aa81c9aeb43b61295610f520560e47b9653f Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 27 Dec 2016 22:33:08 -0800 Subject: [PATCH 115/144] test: refactor test-stream-pipe-after-end MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * replace `process.on('exit', ...)` checks with `common.mustCall()` * assert.equal() -> assert.strictEqual() * provide duration of 1ms to timer without a duration * remove unused function argument PR-URL: https://github.com/nodejs/node/pull/10483 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas --- test/parallel/test-stream-pipe-after-end.js | 27 ++++++--------------- 1 file changed, 7 insertions(+), 20 deletions(-) diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js index 1193db21835cb1..79ebb2c0e0990d 100644 --- a/test/parallel/test-stream-pipe-after-end.js +++ b/test/parallel/test-stream-pipe-after-end.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('assert'); const Readable = require('_stream_readable'); const Writable = require('_stream_writable'); @@ -13,7 +13,7 @@ function TestReadable(opt) { this._ended = false; } -TestReadable.prototype._read = function(n) { +TestReadable.prototype._read = function() { if (this._ended) this.emit('error', new Error('_read called twice')); this._ended = true; @@ -35,31 +35,18 @@ TestWritable.prototype._write = function(chunk, encoding, cb) { // this one should not emit 'end' until we read() from it later. const ender = new TestReadable(); -let enderEnded = false; // what happens when you pipe() a Readable that's already ended? 
const piper = new TestReadable(); // pushes EOF null, and length=0, so this will trigger 'end' piper.read(); -setTimeout(function() { - ender.on('end', function() { - enderEnded = true; - }); - assert(!enderEnded); +setTimeout(common.mustCall(function() { + ender.on('end', common.mustCall(function() {})); const c = ender.read(); - assert.equal(c, null); + assert.strictEqual(c, null); const w = new TestWritable(); - let writableFinished = false; - w.on('finish', function() { - writableFinished = true; - }); + w.on('finish', common.mustCall(function() {})); piper.pipe(w); - - process.on('exit', function() { - assert(enderEnded); - assert(writableFinished); - console.log('ok'); - }); -}); +}), 1); From 5fc93ee8413dea236f9c50c8a67d1dc3f7df0d3d Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Mon, 26 Dec 2016 09:43:06 -0500 Subject: [PATCH 116/144] test: refactor the code in test-http-connect * use common.mustCall to control the functions execution automatically * use let and const instead of var * use assert.strictEqual instead of assert.equal * use arrow functions * remove console.error and unnecessary variables PR-URL: https://github.com/nodejs/node/pull/10397 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Luigi Pinca --- test/parallel/test-http-connect.js | 78 +++++++++++++----------------- 1 file changed, 33 insertions(+), 45 deletions(-) diff --git a/test/parallel/test-http-connect.js b/test/parallel/test-http-connect.js index 9da199b8ee9a0d..fa2d8aaa4f1ccf 100644 --- a/test/parallel/test-http-connect.js +++ b/test/parallel/test-http-connect.js @@ -1,85 +1,73 @@ 'use strict'; const common = require('../common'); -var assert = require('assert'); -var http = require('http'); +const assert = require('assert'); +const http = require('http'); -var serverGotConnect = false; -var clientGotConnect = false; +const server = http.createServer(common.fail); -var server = http.createServer(common.fail); -server.on('connect', function(req, socket, firstBodyChunk) { - assert.equal(req.method, 'CONNECT'); - assert.equal(req.url, 'google.com:443'); - console.error('Server got CONNECT request'); - serverGotConnect = true; +server.on('connect', common.mustCall((req, socket, firstBodyChunk) => { + assert.strictEqual(req.method, 'CONNECT'); + assert.strictEqual(req.url, 'google.com:443'); socket.write('HTTP/1.1 200 Connection established\r\n\r\n'); - var data = firstBodyChunk.toString(); - socket.on('data', function(buf) { + let data = firstBodyChunk.toString(); + socket.on('data', (buf) => { data += buf.toString(); }); - socket.on('end', function() { + + socket.on('end', common.mustCall(() => { socket.end(data); - }); -}); -server.listen(0, function() { - var req = http.request({ + })); +})); + +server.listen(0, common.mustCall(function() { + const req = http.request({ port: this.address().port, method: 'CONNECT', path: 'google.com:443' }, common.fail); - var clientRequestClosed = false; - req.on('close', function() { - clientRequestClosed = true; - }); - - req.on('connect', function(res, socket, firstBodyChunk) { - console.error('Client got CONNECT request'); - clientGotConnect = true; + req.on('close', common.mustCall(() => {})); + req.on('connect', common.mustCall((res, socket, firstBodyChunk) => { // Make sure this request got removed from the pool. 
- var name = 'localhost:' + server.address().port; + const name = 'localhost:' + server.address().port; assert(!http.globalAgent.sockets.hasOwnProperty(name)); assert(!http.globalAgent.requests.hasOwnProperty(name)); // Make sure this socket has detached. assert(!socket.ondata); assert(!socket.onend); - assert.equal(socket.listeners('connect').length, 0); - assert.equal(socket.listeners('data').length, 0); + assert.strictEqual(socket.listeners('connect').length, 0); + assert.strictEqual(socket.listeners('data').length, 0); // the stream.Duplex onend listener // allow 0 here, so that i can run the same test on streams1 impl assert(socket.listeners('end').length <= 1); - assert.equal(socket.listeners('free').length, 0); - assert.equal(socket.listeners('close').length, 0); - assert.equal(socket.listeners('error').length, 0); - assert.equal(socket.listeners('agentRemove').length, 0); + assert.strictEqual(socket.listeners('free').length, 0); + assert.strictEqual(socket.listeners('close').length, 0); + assert.strictEqual(socket.listeners('error').length, 0); + assert.strictEqual(socket.listeners('agentRemove').length, 0); - var data = firstBodyChunk.toString(); - socket.on('data', function(buf) { + let data = firstBodyChunk.toString(); + socket.on('data', (buf) => { data += buf.toString(); }); - socket.on('end', function() { - assert.equal(data, 'HeadBody'); - assert(clientRequestClosed); + + socket.on('end', common.mustCall(() => { + assert.strictEqual(data, 'HeadBody'); server.close(); - }); + })); + socket.write('Body'); socket.end(); - }); + })); // It is legal for the client to send some data intended for the server // before the "200 Connection established" (or any other success or // error code) is received. req.write('Head'); req.end(); -}); - -process.on('exit', function() { - assert.ok(serverGotConnect); - assert.ok(clientGotConnect); -}); +})); From 96c3c65a86a2727566f04344354d28b48ee9c077 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 29 Dec 2016 16:16:37 -0800 Subject: [PATCH 117/144] doc: require two-factor authentication MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Collaborators have elevated privileges. The CTC now requires Collaborator accounts to have two-factor authentication. This changes wording in the onboarding documentation to make it clear that two-factor authentication is required and not merely recommended. PR-URL: https://github.com/nodejs/node/pull/10529 Reviewed-By: Colin Ihrig Reviewed-By: Evan Lucas Reviewed-By: Ali Ijaz Sheikh Reviewed-By: Сковорода Никита Андреевич Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Gibson Fahnestock Reviewed-By: Luigi Pinca Reviewed-By: Matteo Collina --- doc/onboarding.md | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/doc/onboarding.md b/doc/onboarding.md index 665890da968699..dbe3dfd569b7ae 100644 --- a/doc/onboarding.md +++ b/doc/onboarding.md @@ -5,14 +5,19 @@ onboarding session. ## One week before the onboarding session -* Ask the new Collaborator if they are using two-factor authentication on their - GitHub account. If they are not, suggest that they enable it as their account - will have elevated privileges in many of the Node.js repositories. +* Confirm that the new Collaborator is using two-factor authentication on their + GitHub account. 
Unless two-factor authentication is enabled, do not give an + account elevated privileges such as the ability to land code in the main + repository or to start continuous integration (CI) jobs. ## Fifteen minutes before the onboarding session -* Prior to the onboarding session, add the new Collaborators to -[the Collaborators team](https://github.com/orgs/nodejs/teams/collaborators). +* Prior to the onboarding session, add the new Collaborator to + [the Collaborators team](https://github.com/orgs/nodejs/teams/collaborators). + Note that this is the step that gives the account elevated privileges, so + do not perform this step (or any subsequent steps) unless two-factor + authentication is enabled on the new Collaborator's GitHub account. + ## Onboarding session From 4a16f9b05428fed434aeffda5dafb46cf34cbada Mon Sep 17 00:00:00 2001 From: Adrian Estrada Date: Tue, 27 Dec 2016 21:43:44 -0500 Subject: [PATCH 118/144] test: improve test-fs-empty-readStream.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * use const instead of var * use common.mustCall to control functions execution * use assert.strictEqual instead of assert.equal * use arrow functions PR-URL: https://github.com/nodejs/node/pull/10479 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Italo A. Casas Reviewed-By: Luigi Pinca Reviewed-By: Brian White --- test/parallel/test-fs-empty-readStream.js | 32 +++++++++++------------ 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/test/parallel/test-fs-empty-readStream.js b/test/parallel/test-fs-empty-readStream.js index c5a016f9ea2c36..858d07e4f0b982 100644 --- a/test/parallel/test-fs-empty-readStream.js +++ b/test/parallel/test-fs-empty-readStream.js @@ -1,30 +1,30 @@ 'use strict'; -var common = require('../common'); -var assert = require('assert'); -var path = require('path'); -var fs = require('fs'); +const common = require('../common'); +const assert = require('assert'); +const path = require('path'); +const fs = require('fs'); -var emptyFile = path.join(common.fixturesDir, 'empty.txt'); +const emptyFile = path.join(common.fixturesDir, 'empty.txt'); -fs.open(emptyFile, 'r', function(error, fd) { +fs.open(emptyFile, 'r', common.mustCall((error, fd) => { assert.ifError(error); - var read = fs.createReadStream(emptyFile, { 'fd': fd }); + const read = fs.createReadStream(emptyFile, { 'fd': fd }); - read.once('data', function() { + read.once('data', () => { throw new Error('data event should not emit'); }); read.once('end', common.mustCall(function endEvent1() {})); -}); +})); -fs.open(emptyFile, 'r', function(error, fd) { +fs.open(emptyFile, 'r', common.mustCall((error, fd) => { assert.ifError(error); - var read = fs.createReadStream(emptyFile, { 'fd': fd }); + const read = fs.createReadStream(emptyFile, { 'fd': fd }); read.pause(); - read.once('data', function() { + read.once('data', () => { throw new Error('data event should not emit'); }); @@ -32,7 +32,7 @@ fs.open(emptyFile, 'r', function(error, fd) { throw new Error('end event should not emit'); }); - setTimeout(function() { - assert.equal(read.isPaused(), true); - }, common.platformTimeout(50)); -}); + setTimeout(common.mustCall(() => { + assert.strictEqual(read.isPaused(), true); + }), common.platformTimeout(50)); +})); From deb0917f76c23f97681d49d9e04cbdc8f0566a22 Mon Sep 17 00:00:00 2001 From: Matthew Garrett Date: Mon, 26 Dec 2016 22:45:16 +0000 Subject: [PATCH 119/144] doc: warn about unvalidated input in child_process 
child_process.exec*() and child_process.spawn*() (if options.shell is true) allow trivial arbitrary command execution if code passes unsanitised user input to it. Add warnings in the docs to make that clear. PR-URL: https://github.com/nodejs/node/pull/10466 Reviewed-By: Gibson Fahnestock Reviewed-By: Sam Roberts Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- doc/api/child_process.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/doc/api/child_process.md b/doc/api/child_process.md index 110de3bd869e05..0147242c5583a0 100644 --- a/doc/api/child_process.md +++ b/doc/api/child_process.md @@ -149,6 +149,10 @@ added: v0.1.90 Spawns a shell then executes the `command` within that shell, buffering any generated output. +**Note: Never pass unsanitised user input to this function. Any input +containing shell metacharacters may be used to trigger arbitrary command +execution.** + ```js const exec = require('child_process').exec; exec('cat *.js bad_file | wc -l', (error, stdout, stderr) => { @@ -324,6 +328,10 @@ The `child_process.spawn()` method spawns a new process using the given `command`, with command line arguments in `args`. If omitted, `args` defaults to an empty array. +**Note: If the `shell` option is enabled, do not pass unsanitised user input to +this function. Any input containing shell metacharacters may be used to +trigger arbitrary command execution.** + A third argument may be used to specify additional options, with these defaults: ```js @@ -645,6 +653,10 @@ If the process times out, or has a non-zero exit code, this method ***will*** throw. The [`Error`][] object will contain the entire result from [`child_process.spawnSync()`][] +**Note: Never pass unsanitised user input to this function. Any input +containing shell metacharacters may be used to trigger arbitrary command +execution.** + ### child_process.spawnSync(command[, args][, options]) + +Streams can be either [Readable][], [Writable][], or both ([Duplex][]). + +All streams are EventEmitters, but they also have other custom methods +and properties depending on whether they are Readable, Writable, or +Duplex. + +If a stream is both Readable and Writable, then it implements all of +the methods and events. So, a [Duplex][] or [Transform][] stream is +fully described by this API, though their implementation may be +somewhat different. + +It is not necessary to implement Stream interfaces in order to consume +streams in your programs. If you **are** implementing streaming +interfaces in your own program, please also refer to +[API for Stream Implementors][]. + +Almost all Node.js programs, no matter how simple, use Streams in some +way. Here is an example of using Streams in an Node.js program: + +```js +const http = require('http'); + +var server = http.createServer( (req, res) => { + // req is an http.IncomingMessage, which is a Readable Stream + // res is an http.ServerResponse, which is a Writable Stream + + var body = ''; + // we want to get the data as utf8 strings + // If you don't set an encoding, then you'll get Buffer objects + req.setEncoding('utf8'); + + // Readable streams emit 'data' events once a listener is added + req.on('data', (chunk) => { + body += chunk; + }); + + // the end event tells you that you have entire body + req.on('end', () => { + try { + var data = JSON.parse(body); + } catch (er) { + // uh oh! bad json! 
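+      // reply with 400 Bad Request below and send the JSON parse error back to the client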
+ res.statusCode = 400; + return res.end(`error: ${er.message}`); + } + + // write back something interesting to the user: + res.write(typeof data); + res.end(); + }); +}); + +server.listen(1337); + +// $ curl localhost:1337 -d '{}' +// object +// $ curl localhost:1337 -d '"foo"' +// string +// $ curl localhost:1337 -d 'not json' +// error: Unexpected token o +``` + +### Class: stream.Duplex + +Duplex streams are streams that implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Duplex streams include: + +* [TCP sockets][] +* [zlib streams][zlib] +* [crypto streams][crypto] + +### Class: stream.Readable + + + +The Readable stream interface is the abstraction for a *source* of +data that you are reading from. In other words, data comes *out* of a +Readable stream. + +A Readable stream will not start emitting data until you indicate that +you are ready to receive it. + +Readable streams have two "modes": a **flowing mode** and a **paused +mode**. When in flowing mode, data is read from the underlying system +and provided to your program as fast as possible. In paused mode, you +must explicitly call [`stream.read()`][stream-read] to get chunks of data out. +Streams start out in paused mode. + +**Note**: If no data event handlers are attached, and there are no +[`stream.pipe()`][] destinations, and the stream is switched into flowing +mode, then data will be lost. + +You can switch to flowing mode by doing any of the following: + +* Adding a [`'data'`][] event handler to listen for data. +* Calling the [`stream.resume()`][stream-resume] method to explicitly open the + flow. +* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. + +You can switch back to paused mode by doing either of the following: + +* If there are no pipe destinations, by calling the + [`stream.pause()`][stream-pause] method. +* If there are pipe destinations, by removing any [`'data'`][] event + handlers, and removing all pipe destinations by calling the + [`stream.unpipe()`][] method. + +Note that, for backwards compatibility reasons, removing [`'data'`][] +event handlers will **not** automatically pause the stream. Also, if +there are piped destinations, then calling [`stream.pause()`][stream-pause] will +not guarantee that the stream will *remain* paused once those +destinations drain and ask for more data. + +Examples of readable streams include: + +* [HTTP responses, on the client][http-incoming-message] +* [HTTP requests, on the server][http-incoming-message] +* [fs read streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdout and stderr][] +* [`process.stdin`][] + +#### Event: 'close' + +Emitted when the stream and any of its underlying resources (a file +descriptor, for example) have been closed. The event indicates that +no more events will be emitted, and no further computation will occur. + +Not all streams will emit the `'close'` event. + +#### Event: 'data' + +* `chunk` {Buffer|String} The chunk of data. + +Attaching a `'data'` event listener to a stream that has not been +explicitly paused will switch the stream into flowing mode. Data will +then be passed as soon as it is available. + +If you just want to get all the data out of the stream as fast as +possible, this is the best way to do so. 
+ +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); +}); +``` + +#### Event: 'end' + +This event fires when there will be no more data to read. + +Note that the `'end'` event **will not fire** unless the data is +completely consumed. This can be done by switching into flowing mode, +or by calling [`stream.read()`][stream-read] repeatedly until you get to the +end. + +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); +}); +readable.on('end', () => { + console.log('there will be no more data.'); +}); +``` + +#### Event: 'error' + +* {Error Object} + +Emitted if there was an error receiving data. + +#### Event: 'readable' + +When a chunk of data can be read from the stream, it will emit a +`'readable'` event. + +In some cases, listening for a `'readable'` event will cause some data +to be read into the internal buffer from the underlying system, if it +hadn't already. + +```javascript +var readable = getReadableStreamSomehow(); +readable.on('readable', () => { + // there is some data to read now +}); +``` + +Once the internal buffer is drained, a `'readable'` event will fire +again when more data is available. + +The `'readable'` event is not emitted in the "flowing" mode with the +sole exception of the last one, on end-of-stream. + +The `'readable'` event indicates that the stream has new information: +either new data is available or the end of the stream has been reached. +In the former case, [`stream.read()`][stream-read] will return that data. In the +latter case, [`stream.read()`][stream-read] will return null. For instance, in +the following example, `foo.txt` is an empty file: + +```js +const fs = require('fs'); +var rr = fs.createReadStream('foo.txt'); +rr.on('readable', () => { + console.log('readable:', rr.read()); +}); +rr.on('end', () => { + console.log('end'); +}); +``` + +The output of running this script is: + +``` +$ node test.js +readable: null +end +``` + +#### readable.isPaused() + +* Return: {Boolean} + +This method returns whether or not the `readable` has been **explicitly** +paused by client code (using [`stream.pause()`][stream-pause] without a +corresponding [`stream.resume()`][stream-resume]). + +```js +var readable = new stream.Readable + +readable.isPaused() // === false +readable.pause() +readable.isPaused() // === true +readable.resume() +readable.isPaused() // === false +``` + +#### readable.pause() + +* Return: `this` + +This method will cause a stream in flowing mode to stop emitting +[`'data'`][] events, switching out of flowing mode. Any data that becomes +available will remain in the internal buffer. + +```js +var readable = getReadableStreamSomehow(); +readable.on('data', (chunk) => { + console.log('got %d bytes of data', chunk.length); + readable.pause(); + console.log('there will be no more data for 1 second'); + setTimeout(() => { + console.log('now data will start flowing again'); + readable.resume(); + }, 1000); +}); +``` + +#### readable.pipe(destination[, options]) + +* `destination` {stream.Writable} The destination for writing data +* `options` {Object} Pipe options + * `end` {Boolean} End the writer when the reader ends. Default = `true` + +This method pulls all the data out of a readable stream, and writes it +to the supplied destination, automatically managing the flow so that +the destination is not overwhelmed by a fast readable stream. 
+ +Multiple destinations can be piped to safely. + +```js +var readable = getReadableStreamSomehow(); +var writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt' +readable.pipe(writable); +``` + +This function returns the destination stream, so you can set up pipe +chains like so: + +```js +var r = fs.createReadStream('file.txt'); +var z = zlib.createGzip(); +var w = fs.createWriteStream('file.txt.gz'); +r.pipe(z).pipe(w); +``` + +For example, emulating the Unix `cat` command: + +```js +process.stdin.pipe(process.stdout); +``` + +By default [`stream.end()`][stream-end] is called on the destination when the +source stream emits [`'end'`][], so that `destination` is no longer writable. +Pass `{ end: false }` as `options` to keep the destination stream open. + +This keeps `writer` open so that "Goodbye" can be written at the +end. + +```js +reader.pipe(writer, { end: false }); +reader.on('end', () => { + writer.end('Goodbye\n'); +}); +``` + +Note that [`process.stderr`][] and [`process.stdout`][] are never closed until +the process exits, regardless of the specified options. + +#### readable.read([size]) + +* `size` {Number} Optional argument to specify how much data to read. +* Return {String|Buffer|Null} + +The `read()` method pulls some data out of the internal buffer and +returns it. If there is no data available, then it will return +`null`. + +If you pass in a `size` argument, then it will return that many +bytes. If `size` bytes are not available, then it will return `null`, +unless we've ended, in which case it will return the data remaining +in the buffer. + +If you do not specify a `size` argument, then it will return all the +data in the internal buffer. + +This method should only be called in paused mode. In flowing mode, +this method is called automatically until the internal buffer is +drained. + +```js +var readable = getReadableStreamSomehow(); +readable.on('readable', () => { + var chunk; + while (null !== (chunk = readable.read())) { + console.log('got %d bytes of data', chunk.length); + } +}); +``` + +If this method returns a data chunk, then it will also trigger the +emission of a [`'data'`][] event. + +Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] +event has been triggered will return `null`. No runtime error will be raised. + +#### readable.resume() + +* Return: `this` + +This method will cause the readable stream to resume emitting [`'data'`][] +events. + +This method will switch the stream into flowing mode. If you do *not* +want to consume the data from a stream, but you *do* want to get to +its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open +the flow of data. + +```js +var readable = getReadableStreamSomehow(); +readable.resume(); +readable.on('end', () => { + console.log('got to the end, but did not read anything'); +}); +``` + +#### readable.setEncoding(encoding) + +* `encoding` {String} The encoding to use. +* Return: `this` + +Call this function to cause the stream to return strings of the specified +encoding instead of Buffer objects. For example, if you do +`readable.setEncoding('utf8')`, then the output data will be interpreted as +UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, +then the data will be encoded in hexadecimal string format. + +This properly handles multi-byte characters that would otherwise be +potentially mangled if you simply pulled the Buffers directly and +called [`buf.toString(encoding)`][] on them. 
If you want to read the data +as strings, always use this method. + +Also you can disable any encoding at all with `readable.setEncoding(null)`. +This approach is very useful if you deal with binary data or with large +multi-byte strings spread out over multiple chunks. + +```js +var readable = getReadableStreamSomehow(); +readable.setEncoding('utf8'); +readable.on('data', (chunk) => { + assert.equal(typeof chunk, 'string'); + console.log('got %d characters of string data', chunk.length); +}); +``` + +#### readable.unpipe([destination]) + +* `destination` {stream.Writable} Optional specific stream to unpipe + +This method will remove the hooks set up for a previous [`stream.pipe()`][] +call. + +If the destination is not specified, then all pipes are removed. + +If the destination is specified, but no pipe is set up for it, then +this is a no-op. + +```js +var readable = getReadableStreamSomehow(); +var writable = fs.createWriteStream('file.txt'); +// All the data from readable goes into 'file.txt', +// but only for the first second +readable.pipe(writable); +setTimeout(() => { + console.log('stop writing to file.txt'); + readable.unpipe(writable); + console.log('manually close the file stream'); + writable.end(); +}, 1000); +``` + +#### readable.unshift(chunk) + +* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue + +This is useful in certain cases where a stream is being consumed by a +parser, which needs to "un-consume" some data that it has +optimistically pulled out of the source, so that the stream can be +passed on to some other party. + +Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event +has been triggered; a runtime error will be raised. + +If you find that you must often call `stream.unshift(chunk)` in your +programs, consider implementing a [Transform][] stream instead. (See [API +for Stream Implementors][].) + +```js +// Pull off a header delimited by \n\n +// use unshift() if we get too much +// Call the callback with (error, header, stream) +const StringDecoder = require('string_decoder').StringDecoder; +function parseHeader(stream, callback) { + stream.on('error', callback); + stream.on('readable', onReadable); + var decoder = new StringDecoder('utf8'); + var header = ''; + function onReadable() { + var chunk; + while (null !== (chunk = stream.read())) { + var str = decoder.write(chunk); + if (str.match(/\n\n/)) { + // found the header boundary + var split = str.split(/\n\n/); + header += split.shift(); + var remaining = split.join('\n\n'); + var buf = new Buffer(remaining, 'utf8'); + if (buf.length) + stream.unshift(buf); + stream.removeListener('error', callback); + stream.removeListener('readable', onReadable); + // now the body of the message can be read from the stream. + callback(null, header, stream); + } else { + // still reading the header. + header += str; + } + } + } +} +``` + +Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` +will not end the reading process by resetting the internal reading state of the +stream. This can cause unexpected results if `unshift()` is called during a +read (i.e. from within a [`stream._read()`][stream-_read] implementation on a +custom stream). Following the call to `unshift()` with an immediate +[`stream.push('')`][stream-push] will reset the reading state appropriately, +however it is best to simply avoid calling `unshift()` while in the process of +performing a read. 
+ +#### readable.wrap(stream) + +* `stream` {Stream} An "old style" readable stream + +Versions of Node.js prior to v0.10 had streams that did not implement the +entire Streams API as it is today. (See [Compatibility][] for +more information.) + +If you are using an older Node.js library that emits [`'data'`][] events and +has a [`stream.pause()`][stream-pause] method that is advisory only, then you +can use the `wrap()` method to create a [Readable][] stream that uses the old +stream as its data source. + +You will very rarely ever need to call this function, but it exists +as a convenience for interacting with old Node.js programs and libraries. + +For example: + +```js +const OldReader = require('./old-api-module.js').OldReader; +const Readable = require('stream').Readable; +const oreader = new OldReader; +const myReader = new Readable().wrap(oreader); + +myReader.on('readable', () => { + myReader.read(); // etc. +}); +``` + +### Class: stream.Transform + +Transform streams are [Duplex][] streams where the output is in some way +computed from the input. They implement both the [Readable][] and +[Writable][] interfaces. + +Examples of Transform streams include: + +* [zlib streams][zlib] +* [crypto streams][crypto] + +### Class: stream.Writable + + + +The Writable stream interface is an abstraction for a *destination* +that you are writing data *to*. + +Examples of writable streams include: + +* [HTTP requests, on the client][] +* [HTTP responses, on the server][] +* [fs write streams][] +* [zlib streams][zlib] +* [crypto streams][crypto] +* [TCP sockets][] +* [child process stdin][] +* [`process.stdout`][], [`process.stderr`][] + +#### Event: 'drain' + +If a [`stream.write(chunk)`][stream-write] call returns `false`, then the +`'drain'` event will indicate when it is appropriate to begin writing more data +to the stream. + +```js +// Write the data to the supplied writable stream one million times. +// Be attentive to back-pressure. +function writeOneMillionTimes(writer, data, encoding, callback) { + var i = 1000000; + write(); + function write() { + var ok = true; + do { + i -= 1; + if (i === 0) { + // last time! + writer.write(data, encoding, callback); + } else { + // see if we should continue, or wait + // don't pass the callback, because we're not done yet. + ok = writer.write(data, encoding); + } + } while (i > 0 && ok); + if (i > 0) { + // had to stop early! + // write some more once it drains + writer.once('drain', write); + } + } +} +``` + +#### Event: 'error' + +* {Error} + +Emitted if there was an error when writing or piping data. + +#### Event: 'finish' + +When the [`stream.end()`][stream-end] method has been called, and all data has +been flushed to the underlying system, this event is emitted. + +```javascript +var writer = getWritableStreamSomehow(); +for (var i = 0; i < 100; i ++) { + writer.write('hello, #${i}!\n'); +} +writer.end('this is the end\n'); +writer.on('finish', () => { + console.error('all writes are now complete.'); +}); +``` + +#### Event: 'pipe' + +* `src` {stream.Readable} source stream that is piping to this writable + +This is emitted whenever the [`stream.pipe()`][] method is called on a readable +stream, adding this writable to its set of destinations. 
+ +```js +var writer = getWritableStreamSomehow(); +var reader = getReadableStreamSomehow(); +writer.on('pipe', (src) => { + console.error('something is piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +``` + +#### Event: 'unpipe' + +* `src` {[Readable][] Stream} The source stream that + [unpiped][`stream.unpipe()`] this writable + +This is emitted whenever the [`stream.unpipe()`][] method is called on a +readable stream, removing this writable from its set of destinations. + +```js +var writer = getWritableStreamSomehow(); +var reader = getReadableStreamSomehow(); +writer.on('unpipe', (src) => { + console.error('something has stopped piping into the writer'); + assert.equal(src, reader); +}); +reader.pipe(writer); +reader.unpipe(writer); +``` + +#### writable.cork() + +Forces buffering of all writes. + +Buffered data will be flushed either at [`stream.uncork()`][] or at +[`stream.end()`][stream-end] call. + +#### writable.end([chunk][, encoding][, callback]) + +* `chunk` {String|Buffer} Optional data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Optional callback for when the stream is finished + +Call this method when no more data will be written to the stream. If supplied, +the callback is attached as a listener on the [`'finish'`][] event. + +Calling [`stream.write()`][stream-write] after calling +[`stream.end()`][stream-end] will raise an error. + +```js +// write 'hello, ' and then end with 'world!' +var file = fs.createWriteStream('example.txt'); +file.write('hello, '); +file.end('world!'); +// writing more now is not allowed! +``` + +#### writable.setDefaultEncoding(encoding) + +* `encoding` {String} The new default encoding + +Sets the default encoding for a writable stream. + +#### writable.uncork() + +Flush all data, buffered since [`stream.cork()`][] call. + +#### writable.write(chunk[, encoding][, callback]) + +* `chunk` {String|Buffer} The data to write +* `encoding` {String} The encoding, if `chunk` is a String +* `callback` {Function} Callback for when this chunk of data is flushed +* Returns: {Boolean} `true` if the data was handled completely. + +This method writes some data to the underlying system, and calls the +supplied callback once the data has been fully handled. + +The return value indicates if you should continue writing right now. +If the data had to be buffered internally, then it will return +`false`. Otherwise, it will return `true`. + +This return value is strictly advisory. You MAY continue to write, +even if it returns `false`. However, writes will be buffered in +memory, so it is best not to do this excessively. Instead, wait for +the [`'drain'`][] event before writing more data. + + +## API for Stream Implementors + + + +To implement any sort of stream, the pattern is the same: + +1. Extend the appropriate parent class in your own subclass. (The + [`util.inherits()`][] method is particularly helpful for this.) +2. Call the appropriate parent class constructor in your constructor, + to be sure that the internal mechanisms are set up properly. +3. Implement one or more specific methods, as detailed below. + +The class to extend and the method(s) to implement depend on the sort +of stream class you are writing: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| Use-case | Class | Method(s) to implement |
+| -------- | ----- | ---------------------- |
+| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
+
+ +In your implementation code, it is very important to never call the methods +described in [API for Stream Consumers][]. Otherwise, you can potentially cause +adverse side effects in programs that consume your streaming interfaces. + +### Class: stream.Duplex + + + +A "duplex" stream is one that is both Readable and Writable, such as a TCP +socket connection. + +Note that `stream.Duplex` is an abstract class designed to be extended +with an underlying implementation of the [`stream._read(size)`][stream-_read] +and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you +would with a Readable or Writable stream class. + +Since JavaScript doesn't have multiple prototypal inheritance, this class +prototypally inherits from Readable, and then parasitically from Writable. It is +thus up to the user to implement both the low-level +[`stream._read(n)`][stream-_read] method as well as the low-level +[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension +duplex classes. + +#### new stream.Duplex(options) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then + the stream will automatically end the readable side when the + writable side ends and vice versa. + * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` + for readable side of the stream. Has no effect if `objectMode` + is `true`. + * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` + for writable side of the stream. Has no effect if `objectMode` + is `true`. + +In classes that extend the Duplex class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +### Class: stream.PassThrough + +This is a trivial implementation of a [Transform][] stream that simply +passes the input bytes across to the output. Its purpose is mainly +for examples and testing, but there are occasionally use cases where +it can come in handy as a building block for novel sorts of streams. + +### Class: stream.Readable + + + +`stream.Readable` is an abstract class designed to be extended with an +underlying implementation of the [`stream._read(size)`][stream-_read] method. + +Please see [API for Stream Consumers][] for how to consume +streams in your programs. What follows is an explanation of how to +implement Readable streams in your programs. + +#### new stream.Readable([options]) + +* `options` {Object} + * `highWaterMark` {Number} The maximum number of bytes to store in + the internal buffer before ceasing to read from the underlying + resource. Default = `16384` (16kb), or `16` for `objectMode` streams + * `encoding` {String} If specified, then buffers will be decoded to + strings using the specified encoding. Default = `null` + * `objectMode` {Boolean} Whether this stream should behave + as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns + a single value instead of a Buffer of size n. Default = `false` + * `read` {Function} Implementation for the [`stream._read()`][stream-_read] + method. + +In classes that extend the Readable class, make sure to call the +Readable constructor so that the buffering settings can be properly +initialized. 
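+
+As a minimal sketch of forwarding these constructor options (the
+`ObjectSource` name and its behavior are purely illustrative, not an
+existing class), a subclass might look like this:
+
+```js
+const Readable = require('stream').Readable;
+const util = require('util');
+
+util.inherits(ObjectSource, Readable);
+
+function ObjectSource(items) {
+  // Always call the Readable constructor so the buffering settings
+  // described above are initialized; objectMode here makes each
+  // read() return one queued value at a time.
+  Readable.call(this, { objectMode: true });
+  this._items = items.slice();
+}
+
+ObjectSource.prototype._read = function() {
+  // In object mode, each push() queues exactly one value; pushing
+  // null signals the end of the stream.
+  if (this._items.length === 0)
+    this.push(null);
+  else
+    this.push(this._items.shift());
+};
+```
+
+Something like `new ObjectSource(['a', 'b', 'c'])` would then emit each
+value as a separate `'data'` chunk.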
+ +#### readable.\_read(size) + +* `size` {Number} Number of bytes to read asynchronously + +Note: **Implement this method, but do NOT call it directly.** + +This method is prefixed with an underscore because it is internal to the +class that defines it and should only be called by the internal Readable +class methods. All Readable stream implementations must provide a \_read +method to fetch data from the underlying resource. + +When `_read()` is called, if data is available from the resource, the `_read()` +implementation should start pushing that data into the read queue by calling +[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from +the resource and pushing data until push returns `false`, at which point it +should stop reading from the resource. Only when `_read()` is called again after +it has stopped should it start reading more data from the resource and pushing +that data onto the queue. + +Note: once the `_read()` method is called, it will not be called again until +the [`stream.push()`][stream-push] method is called. + +The `size` argument is advisory. Implementations where a "read" is a +single call that returns data can use this to know how much data to +fetch. Implementations where that is not relevant, such as TCP or +TLS, may ignore this argument, and simply provide data whenever it +becomes available. There is no need, for example to "wait" until +`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. + +#### readable.push(chunk[, encoding]) + + +* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue +* `encoding` {String} Encoding of String chunks. Must be a valid + Buffer encoding, such as `'utf8'` or `'ascii'` +* return {Boolean} Whether or not more pushes should be performed + +Note: **This method should be called by Readable implementors, NOT +by consumers of Readable streams.** + +If a value other than null is passed, The `push()` method adds a chunk of data +into the queue for subsequent stream processors to consume. If `null` is +passed, it signals the end of the stream (EOF), after which no more data +can be written. + +The data added with `push()` can be pulled out by calling the +[`stream.read()`][stream-read] method when the [`'readable'`][] event fires. + +This API is designed to be as flexible as possible. For example, +you may be wrapping a lower-level source which has some sort of +pause/resume mechanism, and a data callback. In those cases, you +could wrap the low-level source object by doing something like this: + +```js +// source is an object with readStop() and readStart() methods, +// and an `ondata` member that gets called when it has data, and +// an `onend` member that gets called when the data is over. + +util.inherits(SourceWrapper, Readable); + +function SourceWrapper(options) { + Readable.call(this, options); + + this._source = getLowlevelSourceObject(); + + // Every time there's data, we push it into the internal buffer. + this._source.ondata = (chunk) => { + // if push() returns false, then we need to stop reading from source + if (!this.push(chunk)) + this._source.readStop(); + }; + + // When the source ends, we push the EOF-signaling `null` chunk + this._source.onend = () => { + this.push(null); + }; +} + +// _read will be called when the stream wants to pull more data in +// the advisory size argument is ignored in this case. 
+SourceWrapper.prototype._read = function(size) { + this._source.readStart(); +}; +``` + +#### Example: A Counting Stream + + + +This is a basic example of a Readable stream. It emits the numerals +from 1 to 1,000,000 in ascending order, and then ends. + +```js +const Readable = require('stream').Readable; +const util = require('util'); +util.inherits(Counter, Readable); + +function Counter(opt) { + Readable.call(this, opt); + this._max = 1000000; + this._index = 1; +} + +Counter.prototype._read = function() { + var i = this._index++; + if (i > this._max) + this.push(null); + else { + var str = '' + i; + var buf = new Buffer(str, 'ascii'); + this.push(buf); + } +}; +``` + +#### Example: SimpleProtocol v1 (Sub-optimal) + +This is similar to the `parseHeader` function described +[here](#stream_readable_unshift_chunk), but implemented as a custom stream. +Also, note that this implementation does not convert the incoming data to a +string. + +However, this would be better implemented as a [Transform][] stream. See +[SimpleProtocol v2][] for a better implementation. + +```js +// A parser for a simple data protocol. +// The "header" is a JSON object, followed by 2 \n characters, and +// then a message body. +// +// NOTE: This can be done more simply as a Transform stream! +// Using Readable directly for this is sub-optimal. See the +// alternative example below under the Transform section. + +const Readable = require('stream').Readable; +const util = require('util'); + +util.inherits(SimpleProtocol, Readable); + +function SimpleProtocol(source, options) { + if (!(this instanceof SimpleProtocol)) + return new SimpleProtocol(source, options); + + Readable.call(this, options); + this._inBody = false; + this._sawFirstCr = false; + + // source is a readable stream, such as a socket or file + this._source = source; + + var self = this; + source.on('end', () => { + self.push(null); + }); + + // give it a kick whenever the source is readable + // read(0) will not consume any bytes + source.on('readable', () => { + self.read(0); + }); + + this._rawHeader = []; + this.header = null; +} + +SimpleProtocol.prototype._read = function(n) { + if (!this._inBody) { + var chunk = this._source.read(); + + // if the source doesn't have data, we don't have data yet. + if (chunk === null) + return this.push(''); + + // check if the chunk has a \n\n + var split = -1; + for (var i = 0; i < chunk.length; i++) { + if (chunk[i] === 10) { // '\n' + if (this._sawFirstCr) { + split = i; + break; + } else { + this._sawFirstCr = true; + } + } else { + this._sawFirstCr = false; + } + } + + if (split === -1) { + // still waiting for the \n\n + // stash the chunk, and try again. + this._rawHeader.push(chunk); + this.push(''); + } else { + this._inBody = true; + var h = chunk.slice(0, split); + this._rawHeader.push(h); + var header = Buffer.concat(this._rawHeader).toString(); + try { + this.header = JSON.parse(header); + } catch (er) { + this.emit('error', new Error('invalid simple protocol data')); + return; + } + // now, because we got some extra data, unshift the rest + // back into the read queue so that our consumer will see it. + var b = chunk.slice(split); + this.unshift(b); + // calling unshift by itself does not reset the reading state + // of the stream; since we're inside _read, doing an additional + // push('') will reset the state appropriately. + this.push(''); + + // and let them know that we are done parsing the header. 
+ this.emit('header', this.header); + } + } else { + // from there on, just provide the data to our consumer. + // careful not to push(null), since that would indicate EOF. + var chunk = this._source.read(); + if (chunk) this.push(chunk); + } +}; + +// Usage: +// var parser = new SimpleProtocol(source); +// Now parser is a readable stream that will emit 'header' +// with the parsed header data. +``` + +### Class: stream.Transform + +A "transform" stream is a duplex stream where the output is causally +connected in some way to the input, such as a [zlib][] stream or a +[crypto][] stream. + +There is no requirement that the output be the same size as the input, +the same number of chunks, or arrive at the same time. For example, a +Hash stream will only ever have a single chunk of output which is +provided when the input is ended. A zlib stream will produce output +that is either much smaller or much larger than its input. + +Rather than implement the [`stream._read()`][stream-_read] and +[`stream._write()`][stream-_write] methods, Transform classes must implement the +[`stream._transform()`][stream-_transform] method, and may optionally +also implement the [`stream._flush()`][stream-_flush] method. (See below.) + +#### new stream.Transform([options]) + +* `options` {Object} Passed to both Writable and Readable + constructors. Also has the following fields: + * `transform` {Function} Implementation for the + [`stream._transform()`][stream-_transform] method. + * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] + method. + +In classes that extend the Transform class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +#### Events: 'finish' and 'end' + +The [`'finish'`][] and [`'end'`][] events are from the parent Writable +and Readable classes respectively. The `'finish'` event is fired after +[`stream.end()`][stream-end] is called and all chunks have been processed by +[`stream._transform()`][stream-_transform], `'end'` is fired after all data has +been output which is after the callback in [`stream._flush()`][stream-_flush] +has been called. + +#### transform.\_flush(callback) + +* `callback` {Function} Call this function (optionally with an error + argument) when you are done flushing any remaining data. + +Note: **This function MUST NOT be called directly.** It MAY be implemented +by child classes, and if so, will be called by the internal Transform +class methods only. + +In some cases, your transform operation may need to emit a bit more +data at the end of the stream. For example, a `Zlib` compression +stream will store up some internal state so that it can optimally +compress the output. At the end, however, it needs to do the best it +can with what is left, so that the data will be complete. + +In those cases, you can implement a `_flush()` method, which will be +called at the very end, after all the written data is consumed, but +before emitting [`'end'`][] to signal the end of the readable side. Just +like with [`stream._transform()`][stream-_transform], call +`transform.push(chunk)` zero or more times, as appropriate, and call `callback` +when the flush operation is complete. + +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. 
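+
+As a minimal sketch of this pattern (the `Footer` name and its behavior
+are illustrative only, not an existing class), a transform might pass
+data through unchanged and use `_flush()` to emit one final chunk:
+
+```js
+const Transform = require('stream').Transform;
+const util = require('util');
+
+util.inherits(Footer, Transform);
+
+function Footer(options) {
+  Transform.call(this, options);
+  this._bytes = 0;
+}
+
+Footer.prototype._transform = function(chunk, encoding, callback) {
+  this._bytes += chunk.length;   // track how much data passed through
+  callback(null, chunk);         // pass the chunk along unchanged
+};
+
+Footer.prototype._flush = function(callback) {
+  // Called after all written data has been transformed, but before
+  // 'end' is emitted on the readable side.
+  this.push('\ntotal bytes: ' + this._bytes + '\n');
+  callback();
+};
+```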
+ +#### transform.\_transform(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be transformed. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then this is the + encoding type. If chunk is a buffer, then this is the special + value - 'buffer', ignore it in this case. +* `callback` {Function} Call this function (optionally with an error + argument and data) when you are done processing the supplied chunk. + +Note: **This function MUST NOT be called directly.** It should be +implemented by child classes, and called by the internal Transform +class methods only. + +All Transform stream implementations must provide a `_transform()` +method to accept input and produce output. + +`_transform()` should do whatever has to be done in this specific +Transform class, to handle the bytes being written, and pass them off +to the readable portion of the interface. Do asynchronous I/O, +process things, and so on. + +Call `transform.push(outputChunk)` 0 or more times to generate output +from this input chunk, depending on how much data you want to output +as a result of this chunk. + +Call the callback function only when the current chunk is completely +consumed. Note that there may or may not be output as a result of any +particular input chunk. If you supply a second argument to the callback +it will be passed to the push method. In other words the following are +equivalent: + +```js +transform.prototype._transform = function (data, encoding, callback) { + this.push(data); + callback(); +}; + +transform.prototype._transform = function (data, encoding, callback) { + callback(null, data); +}; +``` + +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. + +#### Example: `SimpleProtocol` parser v2 + +The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple +protocol parser can be implemented simply by using the higher level +[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol +v1` examples. + +In this example, rather than providing the input as an argument, it +would be piped into the parser, which is a more idiomatic Node.js stream +approach. + +```javascript +const util = require('util'); +const Transform = require('stream').Transform; +util.inherits(SimpleProtocol, Transform); + +function SimpleProtocol(options) { + if (!(this instanceof SimpleProtocol)) + return new SimpleProtocol(options); + + Transform.call(this, options); + this._inBody = false; + this._sawFirstCr = false; + this._rawHeader = []; + this.header = null; +} + +SimpleProtocol.prototype._transform = function(chunk, encoding, done) { + if (!this._inBody) { + // check if the chunk has a \n\n + var split = -1; + for (var i = 0; i < chunk.length; i++) { + if (chunk[i] === 10) { // '\n' + if (this._sawFirstCr) { + split = i; + break; + } else { + this._sawFirstCr = true; + } + } else { + this._sawFirstCr = false; + } + } + + if (split === -1) { + // still waiting for the \n\n + // stash the chunk, and try again. 
+ this._rawHeader.push(chunk); + } else { + this._inBody = true; + var h = chunk.slice(0, split); + this._rawHeader.push(h); + var header = Buffer.concat(this._rawHeader).toString(); + try { + this.header = JSON.parse(header); + } catch (er) { + this.emit('error', new Error('invalid simple protocol data')); + return; + } + // and let them know that we are done parsing the header. + this.emit('header', this.header); + + // now, because we got some extra data, emit this first. + this.push(chunk.slice(split)); + } + } else { + // from there on, just provide the data to our consumer as-is. + this.push(chunk); + } + done(); +}; + +// Usage: +// var parser = new SimpleProtocol(); +// source.pipe(parser) +// Now parser is a readable stream that will emit 'header' +// with the parsed header data. +``` + +### Class: stream.Writable + + + +`stream.Writable` is an abstract class designed to be extended with an +underlying implementation of the +[`stream._write(chunk, encoding, callback)`][stream-_write] method. + +Please see [API for Stream Consumers][] for how to consume +writable streams in your programs. What follows is an explanation of +how to implement Writable streams in your programs. + +#### new stream.Writable([options]) + +* `options` {Object} + * `highWaterMark` {Number} Buffer level when + [`stream.write()`][stream-write] starts returning `false`. Default = `16384` + (16kb), or `16` for `objectMode` streams. + * `decodeStrings` {Boolean} Whether or not to decode strings into + Buffers before passing them to [`stream._write()`][stream-_write]. + Default = `true` + * `objectMode` {Boolean} Whether or not the + [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can + write arbitrary data instead of only `Buffer` / `String` data. + Default = `false` + * `write` {Function} Implementation for the + [`stream._write()`][stream-_write] method. + * `writev` {Function} Implementation for the + [`stream._writev()`][stream-_writev] method. + +In classes that extend the Writable class, make sure to call the +constructor so that the buffering settings can be properly +initialized. + +#### writable.\_write(chunk, encoding, callback) + +* `chunk` {Buffer|String} The chunk to be written. Will **always** + be a buffer unless the `decodeStrings` option was set to `false`. +* `encoding` {String} If the chunk is a string, then this is the + encoding type. If chunk is a buffer, then this is the special + value - 'buffer', ignore it in this case. +* `callback` {Function} Call this function (optionally with an error + argument) when you are done processing the supplied chunk. + +All Writable stream implementations must provide a +[`stream._write()`][stream-_write] method to send data to the underlying +resource. + +Note: **This function MUST NOT be called directly.** It should be +implemented by child classes, and called by the internal Writable +class methods only. + +Call the callback using the standard `callback(error)` pattern to +signal that the write completed successfully or with an error. + +If the `decodeStrings` flag is set in the constructor options, then +`chunk` may be a string rather than a Buffer, and `encoding` will +indicate the sort of string that it is. This is to support +implementations that have an optimized handling for certain string +data encodings. If you do not explicitly set the `decodeStrings` +option to `false`, then you can safely ignore the `encoding` argument, +and assume that `chunk` will always be a Buffer. 
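+
+For example, a minimal sketch of a Writable that simply collects chunks
+in memory (the `Sink` name is illustrative, not an existing class) could
+look like this:
+
+```js
+const Writable = require('stream').Writable;
+const util = require('util');
+
+util.inherits(Sink, Writable);
+
+function Sink(options) {
+  Writable.call(this, options);
+  this.chunks = [];
+}
+
+Sink.prototype._write = function(chunk, encoding, callback) {
+  // With the default decodeStrings: true, chunk is always a Buffer
+  // and the encoding argument can be ignored.
+  this.chunks.push(chunk);
+  callback();                    // signal that this chunk has been handled
+};
+```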
+ +This method is prefixed with an underscore because it is internal to +the class that defines it, and should not be called directly by user +programs. However, you **are** expected to override this method in +your own extension classes. + +#### writable.\_writev(chunks, callback) + +* `chunks` {Array} The chunks to be written. Each chunk has following + format: `{ chunk: ..., encoding: ... }`. +* `callback` {Function} Call this function (optionally with an error + argument) when you are done processing the supplied chunks. + +Note: **This function MUST NOT be called directly.** It may be +implemented by child classes, and called by the internal Writable +class methods only. + +This function is completely optional to implement. In most cases it is +unnecessary. If implemented, it will be called with all the chunks +that are buffered in the write queue. + + +## Simplified Constructor API + + + +In simple cases there is now the added benefit of being able to construct a +stream without inheritance. + +This can be done by passing the appropriate methods as constructor options: + +Examples: + +### Duplex + +```js +var duplex = new stream.Duplex({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + }, + write: function(chunk, encoding, next) { + // sets this._write under the hood + + // An optional error can be passed as the first argument + next() + } +}); + +// or + +var duplex = new stream.Duplex({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + }, + writev: function(chunks, next) { + // sets this._writev under the hood + + // An optional error can be passed as the first argument + next() + } +}); +``` + +### Readable + +```js +var readable = new stream.Readable({ + read: function(n) { + // sets this._read under the hood + + // push data onto the read queue, passing null + // will signal the end of the stream (EOF) + this.push(chunk); + } +}); +``` + +### Transform + +```js +var transform = new stream.Transform({ + transform: function(chunk, encoding, next) { + // sets this._transform under the hood + + // generate output as many times as needed + // this.push(chunk); + + // call when the current chunk is consumed + next(); + }, + flush: function(done) { + // sets this._flush under the hood + + // generate output as many times as needed + // this.push(chunk); + + done(); + } +}); +``` + +### Writable + +```js +var writable = new stream.Writable({ + write: function(chunk, encoding, next) { + // sets this._write under the hood + + // An optional error can be passed as the first argument + next() + } +}); + +// or + +var writable = new stream.Writable({ + writev: function(chunks, next) { + // sets this._writev under the hood + + // An optional error can be passed as the first argument + next() + } +}); +``` + +## Streams: Under the Hood + + + +### Buffering + + + +Both Writable and Readable streams will buffer data on an internal +object which can be retrieved from `_writableState.getBuffer()` or +`_readableState.buffer`, respectively. + +The amount of data that will potentially be buffered depends on the +`highWaterMark` option which is passed into the constructor. + +Buffering in Readable streams happens when the implementation calls +[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not +call [`stream.read()`][stream-read], then the data will sit in the internal +queue until it is consumed. + +Buffering in Writable streams happens when the user calls +[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. + +The purpose of streams, especially with the [`stream.pipe()`][] method, is to +limit the buffering of data to acceptable levels, so that sources and +destinations of varying speed will not overwhelm the available memory. + +### Compatibility with Older Node.js Versions + + + +In versions of Node.js prior to v0.10, the Readable stream interface was +simpler, but also less powerful and less useful. + +* Rather than waiting for you to call the [`stream.read()`][stream-read] method, + [`'data'`][] events would start emitting immediately. If you needed to do + some I/O to decide how to handle data, then you had to store the chunks + in some kind of buffer so that they would not be lost. +* The [`stream.pause()`][stream-pause] method was advisory, rather than + guaranteed. This meant that you still had to be prepared to receive + [`'data'`][] events even when the stream was in a paused state. + +In Node.js v0.10, the [Readable][] class was added. +For backwards compatibility with older Node.js programs, Readable streams +switch into "flowing mode" when a [`'data'`][] event handler is added, or +when the [`stream.resume()`][stream-resume] method is called. The effect is +that, even if you are not using the new [`stream.read()`][stream-read] method +and [`'readable'`][] event, you no longer have to worry about losing +[`'data'`][] chunks. + +Most programs will continue to function normally. However, this +introduces an edge case in the following conditions: + +* No [`'data'`][] event handler is added. +* The [`stream.resume()`][stream-resume] method is never called. +* The stream is not piped to any writable destination. + +For example, consider the following code: + +```js +// WARNING! BROKEN! +net.createServer((socket) => { + + // we add an 'end' method, but never consume the data + socket.on('end', () => { + // It will never get here. + socket.end('I got your message (but didnt read it)\n'); + }); + +}).listen(1337); +``` + +In versions of Node.js prior to v0.10, the incoming message data would be +simply discarded. However, in Node.js v0.10 and beyond, +the socket will remain paused forever. + +The workaround in this situation is to call the +[`stream.resume()`][stream-resume] method to start the flow of data: + +```js +// Workaround +net.createServer((socket) => { + + socket.on('end', () => { + socket.end('I got your message (but didnt read it)\n'); + }); + + // start the flow of data, discarding it. + socket.resume(); + +}).listen(1337); +``` + +In addition to new Readable streams switching into flowing mode, +pre-v0.10 style streams can be wrapped in a Readable class using the +[`stream.wrap()`][] method. + + +### Object Mode + + + +Normally, Streams operate on Strings and Buffers exclusively. + +Streams that are in **object mode** can emit generic JavaScript values +other than Buffers and Strings. + +A Readable stream in object mode will always return a single item from +a call to [`stream.read(size)`][stream-read], regardless of what the size +argument is. + +A Writable stream in object mode will always ignore the `encoding` +argument to [`stream.write(data, encoding)`][stream-write]. + +The special value `null` still retains its special value for object +mode streams. 
That is, for object mode readable streams, `null` as a +return value from [`stream.read()`][stream-read] indicates that there is no more +data, and [`stream.push(null)`][stream-push] will signal the end of stream data +(`EOF`). + +No streams in Node.js core are object mode streams. This pattern is only +used by userland streaming libraries. + +You should set `objectMode` in your stream child class constructor on +the options object. Setting `objectMode` mid-stream is not safe. + +For Duplex streams `objectMode` can be set exclusively for readable or +writable side with `readableObjectMode` and `writableObjectMode` +respectively. These options can be used to implement parsers and +serializers with Transform streams. + +```js +const util = require('util'); +const StringDecoder = require('string_decoder').StringDecoder; +const Transform = require('stream').Transform; +util.inherits(JSONParseStream, Transform); + +// Gets \n-delimited JSON string data, and emits the parsed objects +function JSONParseStream() { + if (!(this instanceof JSONParseStream)) + return new JSONParseStream(); + + Transform.call(this, { readableObjectMode : true }); + + this._buffer = ''; + this._decoder = new StringDecoder('utf8'); +} + +JSONParseStream.prototype._transform = function(chunk, encoding, cb) { + this._buffer += this._decoder.write(chunk); + // split on newlines + var lines = this._buffer.split(/\r?\n/); + // keep the last partial line buffered + this._buffer = lines.pop(); + for (var l = 0; l < lines.length; l++) { + var line = lines[l]; + try { + var obj = JSON.parse(line); + } catch (er) { + this.emit('error', er); + return; + } + // push the parsed object out to the readable consumer + this.push(obj); + } + cb(); +}; + +JSONParseStream.prototype._flush = function(cb) { + // Just handle any leftover + var rem = this._buffer.trim(); + if (rem) { + try { + var obj = JSON.parse(rem); + } catch (er) { + this.emit('error', er); + return; + } + // push the parsed object out to the readable consumer + this.push(obj); + } + cb(); +}; +``` + +### `stream.read(0)` + +There are some cases where you want to trigger a refresh of the +underlying readable stream mechanisms, without actually consuming any +data. In that case, you can call `stream.read(0)`, which will always +return null. + +If the internal read buffer is below the `highWaterMark`, and the +stream is not currently reading, then calling `stream.read(0)` will trigger +a low-level [`stream._read()`][stream-_read] call. + +There is almost never a need to do this. However, you will see some +cases in Node.js's internals where this is done, particularly in the +Readable stream class internals. + +### `stream.push('')` + +Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an +interesting side effect. Because it *is* a call to +[`stream.push()`][stream-push], it will end the `reading` process. However, it +does *not* add any data to the readable buffer, so there's nothing for +a user to consume. + +Very rarely, there are cases where you have no data to provide now, +but the consumer of your stream (or, perhaps, another bit of your own +code) will know when to check again, by calling [`stream.read(0)`][stream-read]. +In those cases, you *may* call `stream.push('')`. + +So far, the only use case for this functionality is in the +[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. 
If you +find that you have to use `stream.push('')`, please consider another +approach, because it almost certainly indicates that something is +horribly wrong. + +[`'data'`]: #stream_event_data +[`'drain'`]: #stream_event_drain +[`'end'`]: #stream_event_end +[`'finish'`]: #stream_event_finish +[`'readable'`]: #stream_event_readable +[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end +[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter +[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr +[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin +[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout +[`stream.cork()`]: #stream_writable_cork +[`stream.pipe()`]: #stream_readable_pipe_destination_options +[`stream.uncork()`]: #stream_writable_uncork +[`stream.unpipe()`]: #stream_readable_unpipe_destination +[`stream.wrap()`]: #stream_readable_wrap_stream +[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream +[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor +[API for Stream Consumers]: #stream_api_for_stream_consumers +[API for Stream Implementors]: #stream_api_for_stream_implementors +[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin +[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout +[Compatibility]: #stream_compatibility_with_older_node_js_versions +[crypto]: crypto.html +[Duplex]: #stream_class_stream_duplex +[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream +[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream +[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest +[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse +[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage +[Object mode]: #stream_object_mode +[Readable]: #stream_class_stream_readable +[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 +[stream-_flush]: #stream_transform_flush_callback +[stream-_read]: #stream_readable_read_size_1 +[stream-_transform]: #stream_transform_transform_chunk_encoding_callback +[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 +[stream-_writev]: #stream_writable_writev_chunks_callback +[stream-end]: #stream_writable_end_chunk_encoding_callback +[stream-pause]: #stream_readable_pause +[stream-push]: #stream_readable_push_chunk_encoding +[stream-read]: #stream_readable_read_size +[stream-resume]: #stream_readable_resume +[stream-write]: #stream_writable_write_chunk_encoding_callback +[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket +[Transform]: #stream_class_stream_transform +[Writable]: #stream_class_stream_writable +[zlib]: zlib.html diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 00000000000000..c141a99c26c638 --- /dev/null +++ 
b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,58 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js new file mode 100644 index 00000000000000..ca807af87620dd --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_duplex.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000000000..736693b8400fed --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,75 @@ +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +var keys = objectKeys(Writable.prototype); +for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + processNextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000000000..d06f71f1868d77 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,26 @@ +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. 
+ +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000000000..54a9d5c553d69e --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,880 @@ +'use strict'; + +module.exports = Readable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + +Readable.ReadableState = ReadableState; + +var EE = require('events'); + +/**/ +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = undefined; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var StringDecoder; + +util.inherits(Readable, Stream); + +var Duplex; +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.buffer = []; + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // when piping, we only care about 'readable' events that happen + // after read()ing all the bytes and not getting any pushback. + this.ranOut = false; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +var Duplex; +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options && typeof options.read === 'function') this._read = options.read; + + Stream.call(this); +} + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + + if (!state.objectMode && typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = new Buffer(chunk, encoding); + encoding = ''; + } + } + + return readableAddChunk(this, state, chunk, encoding, false); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + var state = this._readableState; + return readableAddChunk(this, state, chunk, '', true); +}; + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +function readableAddChunk(stream, state, chunk, encoding, addToFront) { + var er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (state.ended && !addToFront) { + var e = new Error('stream.push() after EOF'); + stream.emit('error', e); + } else if (state.endEmitted && addToFront) { + var e = new Error('stream.unshift() after end event'); + stream.emit('error', e); + } else { + var skipAdd; + if (state.decoder && !addToFront && !encoding) { + chunk = state.decoder.write(chunk); + skipAdd = !state.objectMode && chunk.length === 0; + } + + if (!addToFront) state.reading = false; + + // Don't add to the buffer if we've decoded to an empty string chunk and + // we're not in object mode + if (!skipAdd) { + // if we want the data now, just emit it. + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + } + + maybeReadMore(stream, state); + } + } else if (!addToFront) { + state.reading = false; + } + + return needMoreData(state); +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. 
Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +// backwards compatibility. +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +function howMuchToRead(n, state) { + if (state.length === 0 && state.ended) return 0; + + if (state.objectMode) return n === 0 ? 0 : 1; + + if (n === null || isNaN(n)) { + // only flow one buffer at a time + if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; + } + + if (n <= 0) return 0; + + // If we're asking for more than the target buffer level, + // then raise the water mark. Bump up to the next highest + // power of 2, to prevent increasing it excessively in tiny + // amounts. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + + // don't have that much. return null, unless we've ended. + if (n > state.length) { + if (!state.ended) { + state.needReadable = true; + return 0; + } else { + return state.length; + } + } + + return n; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + var state = this._readableState; + var nOrig = n; + + if (typeof n !== 'number' || n > 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } + + if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + } + + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (doRead && !state.reading) n = howMuchToRead(nOrig, state); + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } + + state.length -= n; + + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (state.length === 0 && !state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended && state.length === 0) endReadable(this); + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function chunkInvalid(state, chunk) { + var er = null; + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. 
+function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + processNextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : cleanup; + if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable) { + debug('onunpipe'); + if (readable === src) { + cleanup(); + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', cleanup); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + if (false === ret) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { + debug('false write response, pause', src._readableState.awaitDrain); + src._readableState.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + // This is a brutally ugly hack to make sure that our error handler + // is attached before any userland ones. NEVER DO THIS. + if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var _i = 0; _i < len; _i++) { + dests[_i].emit('unpipe', this); + }return this; + } + + // try to find the right one. + var i = indexOf(state.pipes, dest); + if (i === -1) return this; + + state.pipes.splice(i, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + // If listening to data, and it has not explicitly been paused, + // then call resume to start the flow of data on the next tick. 
+ if (ev === 'data' && false !== this._readableState.flowing) { + this.resume(); + } + + if (ev === 'readable' && !this._readableState.endEmitted) { + var state = this._readableState; + if (!state.readableListening) { + state.readableListening = true; + state.emittedReadable = false; + state.needReadable = true; + if (!state.reading) { + processNextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this, state); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + processNextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + if (state.flowing) { + do { + var chunk = stream.read(); + } while (null !== chunk && state.flowing); + } +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var state = this._readableState; + var paused = false; + + var self = this; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) self.push(chunk); + } + + self.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = self.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + var events = ['error', 'close', 'destroy', 'pause', 'resume']; + forEach(events, function (ev) { + stream.on(ev, self.emit.bind(self, ev)); + }); + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + self._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return self; +}; + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. 
+// Length is the combined lengths of all the buffers in the list. +function fromList(n, state) { + var list = state.buffer; + var length = state.length; + var stringMode = !!state.decoder; + var objectMode = !!state.objectMode; + var ret; + + // nothing in the list, definitely empty. + if (list.length === 0) return null; + + if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { + // read it all, truncate the array. + if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); + list.length = 0; + } else { + // read just some of it. + if (n < list[0].length) { + // just take a part of the first list item. + // slice is the same for buffers and strings. + var buf = list[0]; + ret = buf.slice(0, n); + list[0] = buf.slice(n); + } else if (n === list[0].length) { + // first list is a perfect match + ret = list.shift(); + } else { + // complex case. + // we have enough to cover it, but it spans past the first buffer. + if (stringMode) ret = '';else ret = new Buffer(n); + + var c = 0; + for (var i = 0, l = list.length; i < l && c < n; i++) { + var buf = list[0]; + var cpy = Math.min(n - c, buf.length); + + if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); + + if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); + + c += cpy; + } + } + } + + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('endReadable called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + processNextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function forEach(xs, f) { + for (var i = 0, l = xs.length; i < l; i++) { + f(xs[i], i); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000000000..625cdc17698059 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,180 @@ +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. 
When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function TransformState(stream) { + this.afterTransform = function (er, data) { + return afterTransform(stream, er, data); + }; + + this.needTransform = false; + this.transforming = false; + this.writecb = null; + this.writechunk = null; + this.writeencoding = null; +} + +function afterTransform(stream, er, data) { + var ts = stream._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); + + ts.writechunk = null; + ts.writecb = null; + + if (data !== null && data !== undefined) stream.push(data); + + cb(er); + + var rs = stream._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + stream._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = new TransformState(this); + + // when the writable side finishes, then flush out anything remaining. + var stream = this; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. 
+ this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + this.once('prefinish', function () { + if (typeof this._flush === 'function') this._flush(function (er) { + done(stream, er); + });else done(stream); + }); +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +function done(stream, er) { + if (er) return stream.emit('error', er); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + var ws = stream._writableState; + var ts = stream._transformState; + + if (ws.length) throw new Error('calling transform done when ws.length != 0'); + + if (ts.transforming) throw new Error('calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000000000..95916c992a9507 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,516 @@ +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/**/ +var processNextTick = require('process-nextick-args'); +/**/ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; +/**/ + +/**/ +var Buffer = require('buffer').Buffer; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = require('core-util-is'); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream; +(function () { + try { + Stream = require('st' + 'ream'); + } catch (_) {} finally { + if (!Stream) Stream = require('events').EventEmitter; + } +})(); +/**/ + +var Buffer = require('buffer').Buffer; + +util.inherits(Writable, Stream); + +function nop() {} + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +var Duplex; +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; + + // cast to ints. + this.highWaterMark = ~ ~this.highWaterMark; + + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. 
+ this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // create the two objects needed to store the corked requests + // they are not a linked list, as no new elements are inserted in there + this.corkedRequestsFree = new CorkedRequest(this); + this.corkedRequestsFree.next = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function writableStateGetBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') + }); + } catch (_) {} +})(); + +var Duplex; +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, though they're not + // instanceof Writable, they're instanceof Readable. + if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe. Not readable.')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + processNextTick(cb, er); +} + +// If we get something that is not a buffer, string, null, or undefined, +// and we're not in objectMode, then that's an error. +// Otherwise stream chunks are all considered to be of length=1, and the +// watermarks determine how many objects to keep in the buffer, rather than +// how many bytes or characters. 
+function validChunk(stream, state, chunk, cb) { + var valid = true; + + if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { + var er = new TypeError('Invalid non-string/buffer chunk'); + stream.emit('error', er); + processNextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = new Buffer(chunk, encoding); + } + return chunk; +} + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, chunk, encoding, cb) { + chunk = decodeChunk(state, chunk, encoding); + + if (Buffer.isBuffer(chunk)) encoding = 'buffer'; + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) processNextTick(cb, er);else cb(er); + + stream._writableState.errorEmitted = true; + stream.emit('error', er); +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + while (entry) { + buffer[count] = entry; + entry = entry.next; + count += 1; + } + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. 
+ if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequestCount = 0; + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending && !state.finished) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function prefinish(stream, state) { + if (!state.prefinished) { + state.prefinished = true; + stream.emit('prefinish'); + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + if (state.pendingcb === 0) { + prefinish(stream, state); + state.finished = true; + stream.emit('finish'); + } else { + prefinish(stream, state); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) processNextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function (err) { + var entry = _this.entry; + _this.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + if (state.corkedRequestsFree) { + state.corkedRequestsFree.next = _this; + } else { + state.corkedRequestsFree = _this; + } + }; +} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE new file mode 100644 index 00000000000000..d8d7f9437dbf5a --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/LICENSE @@ -0,0 +1,19 @@ +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md new file mode 100644 index 00000000000000..5a76b4149c5eb5 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md @@ -0,0 +1,3 @@ +# core-util-is + +The `util.is*` functions introduced in Node v0.12. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch new file mode 100644 index 00000000000000..a06d5c05f75fd5 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch @@ -0,0 +1,604 @@ +diff --git a/lib/util.js b/lib/util.js +index a03e874..9074e8e 100644 +--- a/lib/util.js ++++ b/lib/util.js +@@ -19,430 +19,6 @@ + // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE + // USE OR OTHER DEALINGS IN THE SOFTWARE. + +-var formatRegExp = /%[sdj%]/g; +-exports.format = function(f) { +- if (!isString(f)) { +- var objects = []; +- for (var i = 0; i < arguments.length; i++) { +- objects.push(inspect(arguments[i])); +- } +- return objects.join(' '); +- } +- +- var i = 1; +- var args = arguments; +- var len = args.length; +- var str = String(f).replace(formatRegExp, function(x) { +- if (x === '%%') return '%'; +- if (i >= len) return x; +- switch (x) { +- case '%s': return String(args[i++]); +- case '%d': return Number(args[i++]); +- case '%j': +- try { +- return JSON.stringify(args[i++]); +- } catch (_) { +- return '[Circular]'; +- } +- default: +- return x; +- } +- }); +- for (var x = args[i]; i < len; x = args[++i]) { +- if (isNull(x) || !isObject(x)) { +- str += ' ' + x; +- } else { +- str += ' ' + inspect(x); +- } +- } +- return str; +-}; +- +- +-// Mark that a method should not be used. +-// Returns a modified function which warns once by default. +-// If --no-deprecation is set, then it is a no-op. +-exports.deprecate = function(fn, msg) { +- // Allow for deprecating things in the process of starting up. 
+- if (isUndefined(global.process)) { +- return function() { +- return exports.deprecate(fn, msg).apply(this, arguments); +- }; +- } +- +- if (process.noDeprecation === true) { +- return fn; +- } +- +- var warned = false; +- function deprecated() { +- if (!warned) { +- if (process.throwDeprecation) { +- throw new Error(msg); +- } else if (process.traceDeprecation) { +- console.trace(msg); +- } else { +- console.error(msg); +- } +- warned = true; +- } +- return fn.apply(this, arguments); +- } +- +- return deprecated; +-}; +- +- +-var debugs = {}; +-var debugEnviron; +-exports.debuglog = function(set) { +- if (isUndefined(debugEnviron)) +- debugEnviron = process.env.NODE_DEBUG || ''; +- set = set.toUpperCase(); +- if (!debugs[set]) { +- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { +- var pid = process.pid; +- debugs[set] = function() { +- var msg = exports.format.apply(exports, arguments); +- console.error('%s %d: %s', set, pid, msg); +- }; +- } else { +- debugs[set] = function() {}; +- } +- } +- return debugs[set]; +-}; +- +- +-/** +- * Echos the value of a value. Trys to print the value out +- * in the best way possible given the different types. +- * +- * @param {Object} obj The object to print out. +- * @param {Object} opts Optional options object that alters the output. +- */ +-/* legacy: obj, showHidden, depth, colors*/ +-function inspect(obj, opts) { +- // default options +- var ctx = { +- seen: [], +- stylize: stylizeNoColor +- }; +- // legacy... +- if (arguments.length >= 3) ctx.depth = arguments[2]; +- if (arguments.length >= 4) ctx.colors = arguments[3]; +- if (isBoolean(opts)) { +- // legacy... +- ctx.showHidden = opts; +- } else if (opts) { +- // got an "options" object +- exports._extend(ctx, opts); +- } +- // set default options +- if (isUndefined(ctx.showHidden)) ctx.showHidden = false; +- if (isUndefined(ctx.depth)) ctx.depth = 2; +- if (isUndefined(ctx.colors)) ctx.colors = false; +- if (isUndefined(ctx.customInspect)) ctx.customInspect = true; +- if (ctx.colors) ctx.stylize = stylizeWithColor; +- return formatValue(ctx, obj, ctx.depth); +-} +-exports.inspect = inspect; +- +- +-// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics +-inspect.colors = { +- 'bold' : [1, 22], +- 'italic' : [3, 23], +- 'underline' : [4, 24], +- 'inverse' : [7, 27], +- 'white' : [37, 39], +- 'grey' : [90, 39], +- 'black' : [30, 39], +- 'blue' : [34, 39], +- 'cyan' : [36, 39], +- 'green' : [32, 39], +- 'magenta' : [35, 39], +- 'red' : [31, 39], +- 'yellow' : [33, 39] +-}; +- +-// Don't use 'blue' not visible on cmd.exe +-inspect.styles = { +- 'special': 'cyan', +- 'number': 'yellow', +- 'boolean': 'yellow', +- 'undefined': 'grey', +- 'null': 'bold', +- 'string': 'green', +- 'date': 'magenta', +- // "name": intentionally not styling +- 'regexp': 'red' +-}; +- +- +-function stylizeWithColor(str, styleType) { +- var style = inspect.styles[styleType]; +- +- if (style) { +- return '\u001b[' + inspect.colors[style][0] + 'm' + str + +- '\u001b[' + inspect.colors[style][1] + 'm'; +- } else { +- return str; +- } +-} +- +- +-function stylizeNoColor(str, styleType) { +- return str; +-} +- +- +-function arrayToHash(array) { +- var hash = {}; +- +- array.forEach(function(val, idx) { +- hash[val] = true; +- }); +- +- return hash; +-} +- +- +-function formatValue(ctx, value, recurseTimes) { +- // Provide a hook for user-specified inspect functions. 
+- // Check that value is an object with an inspect function on it +- if (ctx.customInspect && +- value && +- isFunction(value.inspect) && +- // Filter out the util module, it's inspect function is special +- value.inspect !== exports.inspect && +- // Also filter out any prototype objects using the circular check. +- !(value.constructor && value.constructor.prototype === value)) { +- var ret = value.inspect(recurseTimes, ctx); +- if (!isString(ret)) { +- ret = formatValue(ctx, ret, recurseTimes); +- } +- return ret; +- } +- +- // Primitive types cannot have properties +- var primitive = formatPrimitive(ctx, value); +- if (primitive) { +- return primitive; +- } +- +- // Look up the keys of the object. +- var keys = Object.keys(value); +- var visibleKeys = arrayToHash(keys); +- +- if (ctx.showHidden) { +- keys = Object.getOwnPropertyNames(value); +- } +- +- // Some type of object without properties can be shortcutted. +- if (keys.length === 0) { +- if (isFunction(value)) { +- var name = value.name ? ': ' + value.name : ''; +- return ctx.stylize('[Function' + name + ']', 'special'); +- } +- if (isRegExp(value)) { +- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); +- } +- if (isDate(value)) { +- return ctx.stylize(Date.prototype.toString.call(value), 'date'); +- } +- if (isError(value)) { +- return formatError(value); +- } +- } +- +- var base = '', array = false, braces = ['{', '}']; +- +- // Make Array say that they are Array +- if (isArray(value)) { +- array = true; +- braces = ['[', ']']; +- } +- +- // Make functions say that they are functions +- if (isFunction(value)) { +- var n = value.name ? ': ' + value.name : ''; +- base = ' [Function' + n + ']'; +- } +- +- // Make RegExps say that they are RegExps +- if (isRegExp(value)) { +- base = ' ' + RegExp.prototype.toString.call(value); +- } +- +- // Make dates with properties first say the date +- if (isDate(value)) { +- base = ' ' + Date.prototype.toUTCString.call(value); +- } +- +- // Make error with message first say the error +- if (isError(value)) { +- base = ' ' + formatError(value); +- } +- +- if (keys.length === 0 && (!array || value.length == 0)) { +- return braces[0] + base + braces[1]; +- } +- +- if (recurseTimes < 0) { +- if (isRegExp(value)) { +- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); +- } else { +- return ctx.stylize('[Object]', 'special'); +- } +- } +- +- ctx.seen.push(value); +- +- var output; +- if (array) { +- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); +- } else { +- output = keys.map(function(key) { +- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); +- }); +- } +- +- ctx.seen.pop(); +- +- return reduceToSingleString(output, base, braces); +-} +- +- +-function formatPrimitive(ctx, value) { +- if (isUndefined(value)) +- return ctx.stylize('undefined', 'undefined'); +- if (isString(value)) { +- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') +- .replace(/'/g, "\\'") +- .replace(/\\"/g, '"') + '\''; +- return ctx.stylize(simple, 'string'); +- } +- if (isNumber(value)) { +- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0, +- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 . +- if (value === 0 && 1 / value < 0) +- return ctx.stylize('-0', 'number'); +- return ctx.stylize('' + value, 'number'); +- } +- if (isBoolean(value)) +- return ctx.stylize('' + value, 'boolean'); +- // For some reason typeof null is "object", so special case here. 
+- if (isNull(value)) +- return ctx.stylize('null', 'null'); +-} +- +- +-function formatError(value) { +- return '[' + Error.prototype.toString.call(value) + ']'; +-} +- +- +-function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { +- var output = []; +- for (var i = 0, l = value.length; i < l; ++i) { +- if (hasOwnProperty(value, String(i))) { +- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, +- String(i), true)); +- } else { +- output.push(''); +- } +- } +- keys.forEach(function(key) { +- if (!key.match(/^\d+$/)) { +- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, +- key, true)); +- } +- }); +- return output; +-} +- +- +-function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { +- var name, str, desc; +- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; +- if (desc.get) { +- if (desc.set) { +- str = ctx.stylize('[Getter/Setter]', 'special'); +- } else { +- str = ctx.stylize('[Getter]', 'special'); +- } +- } else { +- if (desc.set) { +- str = ctx.stylize('[Setter]', 'special'); +- } +- } +- if (!hasOwnProperty(visibleKeys, key)) { +- name = '[' + key + ']'; +- } +- if (!str) { +- if (ctx.seen.indexOf(desc.value) < 0) { +- if (isNull(recurseTimes)) { +- str = formatValue(ctx, desc.value, null); +- } else { +- str = formatValue(ctx, desc.value, recurseTimes - 1); +- } +- if (str.indexOf('\n') > -1) { +- if (array) { +- str = str.split('\n').map(function(line) { +- return ' ' + line; +- }).join('\n').substr(2); +- } else { +- str = '\n' + str.split('\n').map(function(line) { +- return ' ' + line; +- }).join('\n'); +- } +- } +- } else { +- str = ctx.stylize('[Circular]', 'special'); +- } +- } +- if (isUndefined(name)) { +- if (array && key.match(/^\d+$/)) { +- return str; +- } +- name = JSON.stringify('' + key); +- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { +- name = name.substr(1, name.length - 2); +- name = ctx.stylize(name, 'name'); +- } else { +- name = name.replace(/'/g, "\\'") +- .replace(/\\"/g, '"') +- .replace(/(^"|"$)/g, "'"); +- name = ctx.stylize(name, 'string'); +- } +- } +- +- return name + ': ' + str; +-} +- +- +-function reduceToSingleString(output, base, braces) { +- var numLinesEst = 0; +- var length = output.reduce(function(prev, cur) { +- numLinesEst++; +- if (cur.indexOf('\n') >= 0) numLinesEst++; +- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; +- }, 0); +- +- if (length > 60) { +- return braces[0] + +- (base === '' ? '' : base + '\n ') + +- ' ' + +- output.join(',\n ') + +- ' ' + +- braces[1]; +- } +- +- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; +-} +- +- + // NOTE: These type checking functions intentionally don't use `instanceof` + // because it is fragile and can be easily faked with `Object.create()`. + function isArray(ar) { +@@ -522,166 +98,10 @@ function isPrimitive(arg) { + exports.isPrimitive = isPrimitive; + + function isBuffer(arg) { +- return arg instanceof Buffer; ++ return Buffer.isBuffer(arg); + } + exports.isBuffer = isBuffer; + + function objectToString(o) { + return Object.prototype.toString.call(o); +-} +- +- +-function pad(n) { +- return n < 10 ? 
'0' + n.toString(10) : n.toString(10); +-} +- +- +-var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', +- 'Oct', 'Nov', 'Dec']; +- +-// 26 Feb 16:19:34 +-function timestamp() { +- var d = new Date(); +- var time = [pad(d.getHours()), +- pad(d.getMinutes()), +- pad(d.getSeconds())].join(':'); +- return [d.getDate(), months[d.getMonth()], time].join(' '); +-} +- +- +-// log is just a thin wrapper to console.log that prepends a timestamp +-exports.log = function() { +- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); +-}; +- +- +-/** +- * Inherit the prototype methods from one constructor into another. +- * +- * The Function.prototype.inherits from lang.js rewritten as a standalone +- * function (not on Function.prototype). NOTE: If this file is to be loaded +- * during bootstrapping this function needs to be rewritten using some native +- * functions as prototype setup using normal JavaScript does not work as +- * expected during bootstrapping (see mirror.js in r114903). +- * +- * @param {function} ctor Constructor function which needs to inherit the +- * prototype. +- * @param {function} superCtor Constructor function to inherit prototype from. +- */ +-exports.inherits = function(ctor, superCtor) { +- ctor.super_ = superCtor; +- ctor.prototype = Object.create(superCtor.prototype, { +- constructor: { +- value: ctor, +- enumerable: false, +- writable: true, +- configurable: true +- } +- }); +-}; +- +-exports._extend = function(origin, add) { +- // Don't do anything if add isn't an object +- if (!add || !isObject(add)) return origin; +- +- var keys = Object.keys(add); +- var i = keys.length; +- while (i--) { +- origin[keys[i]] = add[keys[i]]; +- } +- return origin; +-}; +- +-function hasOwnProperty(obj, prop) { +- return Object.prototype.hasOwnProperty.call(obj, prop); +-} +- +- +-// Deprecated old stuff. 
+- +-exports.p = exports.deprecate(function() { +- for (var i = 0, len = arguments.length; i < len; ++i) { +- console.error(exports.inspect(arguments[i])); +- } +-}, 'util.p: Use console.error() instead'); +- +- +-exports.exec = exports.deprecate(function() { +- return require('child_process').exec.apply(this, arguments); +-}, 'util.exec is now called `child_process.exec`.'); +- +- +-exports.print = exports.deprecate(function() { +- for (var i = 0, len = arguments.length; i < len; ++i) { +- process.stdout.write(String(arguments[i])); +- } +-}, 'util.print: Use console.log instead'); +- +- +-exports.puts = exports.deprecate(function() { +- for (var i = 0, len = arguments.length; i < len; ++i) { +- process.stdout.write(arguments[i] + '\n'); +- } +-}, 'util.puts: Use console.log instead'); +- +- +-exports.debug = exports.deprecate(function(x) { +- process.stderr.write('DEBUG: ' + x + '\n'); +-}, 'util.debug: Use console.error instead'); +- +- +-exports.error = exports.deprecate(function(x) { +- for (var i = 0, len = arguments.length; i < len; ++i) { +- process.stderr.write(arguments[i] + '\n'); +- } +-}, 'util.error: Use console.error instead'); +- +- +-exports.pump = exports.deprecate(function(readStream, writeStream, callback) { +- var callbackCalled = false; +- +- function call(a, b, c) { +- if (callback && !callbackCalled) { +- callback(a, b, c); +- callbackCalled = true; +- } +- } +- +- readStream.addListener('data', function(chunk) { +- if (writeStream.write(chunk) === false) readStream.pause(); +- }); +- +- writeStream.addListener('drain', function() { +- readStream.resume(); +- }); +- +- readStream.addListener('end', function() { +- writeStream.end(); +- }); +- +- readStream.addListener('close', function() { +- call(); +- }); +- +- readStream.addListener('error', function(err) { +- writeStream.end(); +- call(err); +- }); +- +- writeStream.addListener('error', function(err) { +- readStream.destroy(); +- call(err); +- }); +-}, 'util.pump(): Use readableStream.pipe() instead'); +- +- +-var uv; +-exports._errnoException = function(err, syscall) { +- if (isUndefined(uv)) uv = process.binding('uv'); +- var errname = uv.errname(err); +- var e = new Error(syscall + ' ' + errname); +- e.code = errname; +- e.errno = errname; +- e.syscall = syscall; +- return e; +-}; ++} \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js new file mode 100644 index 00000000000000..ff4c851c075a2f --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js @@ -0,0 +1,107 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. + +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json new file mode 100644 index 00000000000000..4f65c18e22abb3 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json @@ -0,0 +1,106 @@ +{ + "_args": [ + [ + { + "raw": "core-util-is@~1.0.0", + "scope": null, + "escapedName": "core-util-is", + "name": "core-util-is", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" + ], + [ + { + "raw": "core-util-is@~1.0.0", + "scope": null, + "escapedName": "core-util-is", + "name": "core-util-is", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" + ] + ], + "_from": 
"core-util-is@~1.0.0", + "_id": "core-util-is@1.0.2", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream/core-util-is", + "_nodeVersion": "4.0.0", + "_npmUser": { + "name": "isaacs", + "email": "i@izs.me" + }, + "_npmVersion": "3.3.2", + "_phantomChildren": {}, + "_requested": { + "raw": "core-util-is@~1.0.0", + "scope": null, + "escapedName": "core-util-is", + "name": "core-util-is", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", + "_shrinkwrap": null, + "_spec": "core-util-is@~1.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", + "author": { + "name": "Isaac Z. Schlueter", + "email": "i@izs.me", + "url": "http://blog.izs.me/" + }, + "bugs": { + "url": "https://github.com/isaacs/core-util-is/issues" + }, + "dependencies": {}, + "description": "The `util.is*` functions introduced in Node v0.12.", + "devDependencies": { + "tap": "^2.3.0" + }, + "directories": {}, + "dist": { + "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7", + "tarball": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + }, + "gitHead": "a177da234df5638b363ddc15fa324619a38577c8", + "homepage": "https://github.com/isaacs/core-util-is#readme", + "keywords": [ + "util", + "isBuffer", + "isArray", + "isNumber", + "isString", + "isRegExp", + "isThis", + "isThat", + "polyfill" + ], + "license": "MIT", + "main": "lib/util.js", + "maintainers": [ + { + "name": "isaacs", + "email": "i@izs.me" + } + ], + "name": "core-util-is", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/core-util-is.git" + }, + "scripts": { + "test": "tap test.js" + }, + "version": "1.0.2" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js new file mode 100644 index 00000000000000..1a490c65ac8b5d --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/test.js @@ -0,0 +1,68 @@ +var assert = require('tap'); + +var t = require('./lib/util'); + +assert.equal(t.isArray([]), true); +assert.equal(t.isArray({}), false); + +assert.equal(t.isBoolean(null), false); +assert.equal(t.isBoolean(true), true); +assert.equal(t.isBoolean(false), true); + +assert.equal(t.isNull(null), true); +assert.equal(t.isNull(undefined), false); +assert.equal(t.isNull(false), false); +assert.equal(t.isNull(), false); + +assert.equal(t.isNullOrUndefined(null), true); +assert.equal(t.isNullOrUndefined(undefined), true); +assert.equal(t.isNullOrUndefined(false), false); +assert.equal(t.isNullOrUndefined(), true); + +assert.equal(t.isNumber(null), false); +assert.equal(t.isNumber('1'), false); +assert.equal(t.isNumber(1), true); + +assert.equal(t.isString(null), false); +assert.equal(t.isString('1'), true); +assert.equal(t.isString(1), false); + +assert.equal(t.isSymbol(null), false); +assert.equal(t.isSymbol('1'), false); +assert.equal(t.isSymbol(1), false); +assert.equal(t.isSymbol(Symbol()), true); + +assert.equal(t.isUndefined(null), false); 
+assert.equal(t.isUndefined(undefined), true); +assert.equal(t.isUndefined(false), false); +assert.equal(t.isUndefined(), true); + +assert.equal(t.isRegExp(null), false); +assert.equal(t.isRegExp('1'), false); +assert.equal(t.isRegExp(new RegExp()), true); + +assert.equal(t.isObject({}), true); +assert.equal(t.isObject([]), true); +assert.equal(t.isObject(new RegExp()), true); +assert.equal(t.isObject(new Date()), true); + +assert.equal(t.isDate(null), false); +assert.equal(t.isDate('1'), false); +assert.equal(t.isDate(new Date()), true); + +assert.equal(t.isError(null), false); +assert.equal(t.isError({ err: true }), false); +assert.equal(t.isError(new Error()), true); + +assert.equal(t.isFunction(null), false); +assert.equal(t.isFunction({ }), false); +assert.equal(t.isFunction(function() {}), true); + +assert.equal(t.isPrimitive(null), true); +assert.equal(t.isPrimitive(''), true); +assert.equal(t.isPrimitive(0), true); +assert.equal(t.isPrimitive(new Date()), false); + +assert.equal(t.isBuffer(null), false); +assert.equal(t.isBuffer({}), false); +assert.equal(t.isBuffer(new Buffer(0)), true); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore new file mode 100644 index 00000000000000..3c3629e647f5dd --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml new file mode 100644 index 00000000000000..cc4dba29d959a2 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile new file mode 100644 index 00000000000000..0ecc29c402c243 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/Makefile @@ -0,0 +1,5 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md new file mode 100644 index 00000000000000..16d2c59c6195f9 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
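
Editorial aside, not part of the patch: the `core-util-is/lib/util.js` and `isarray` files added above both avoid `instanceof` checks and instead dispatch on `Array.isArray` or the `Object.prototype.toString` tag, for the reason stated in the core-util-is comment ("fragile and can be easily faked with `Object.create()`"). A minimal sketch of that failure mode, runnable in plain Node.js:

```js
// Editorial sketch, not part of the patch: why core-util-is and isarray
// prefer Array.isArray / Object.prototype.toString over `instanceof`.
// An ordinary object whose prototype is Array.prototype satisfies the
// `instanceof` check, but not the tag-based checks used in these modules.
var fake = Object.create(Array.prototype);

console.log(fake instanceof Array);                      // true  -- fooled
console.log(Array.isArray(fake));                        // false
console.log(Object.prototype.toString.call(fake));       // '[object Object]'
console.log(Object.prototype.toString.call([1, 2, 3]));  // '[object Array]'
```

The same reasoning applies to the `isRegExp`, `isDate`, and `isError` helpers in `core-util-is/lib/util.js`, which all compare against the `Object.prototype.toString` tag rather than using `instanceof`.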
diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json new file mode 100644 index 00000000000000..9e31b683889015 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js new file mode 100644 index 00000000000000..a57f63495943a0 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json new file mode 100644 index 00000000000000..d3a2bda3c6583e --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json @@ -0,0 +1,116 @@ +{ + "_args": [ + [ + { + "raw": "isarray@~1.0.0", + "scope": null, + "escapedName": "isarray", + "name": "isarray", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" + ], + [ + { + "raw": "isarray@~1.0.0", + "scope": null, + "escapedName": "isarray", + "name": "isarray", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" + ] + ], + "_from": "isarray@~1.0.0", + "_id": "isarray@1.0.0", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream/isarray", + "_nodeVersion": "5.1.0", + "_npmUser": { + "name": "juliangruber", + "email": "julian@juliangruber.com" + }, + "_npmVersion": "3.3.12", + "_phantomChildren": {}, + "_requested": { + "raw": "isarray@~1.0.0", + "scope": null, + "escapedName": "isarray", + "name": "isarray", + "rawSpec": "~1.0.0", + "spec": ">=1.0.0 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", + "_shrinkwrap": null, + "_spec": "isarray@~1.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": 
"http://juliangruber.com" + }, + "bugs": { + "url": "https://github.com/juliangruber/isarray/issues" + }, + "dependencies": {}, + "description": "Array#isArray for older browsers", + "devDependencies": { + "tape": "~2.13.4" + }, + "directories": {}, + "dist": { + "shasum": "bb935d48582cba168c06834957a54a3e07124f11", + "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + }, + "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", + "homepage": "https://github.com/juliangruber/isarray", + "keywords": [ + "browser", + "isarray", + "array" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "juliangruber", + "email": "julian@juliangruber.com" + } + ], + "name": "isarray", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "scripts": { + "test": "tape test.js" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "version": "1.0.0" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js new file mode 100644 index 00000000000000..f7f7bcd19fec56 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/isarray/test.js @@ -0,0 +1,19 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml new file mode 100644 index 00000000000000..36201b10017a5e --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml @@ -0,0 +1,12 @@ +language: node_js +node_js: + - "0.8" + - "0.10" + - "0.11" + - "0.12" + - "1.7.1" + - 1 + - 2 + - 3 + - 4 + - 5 diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js new file mode 100644 index 00000000000000..a4f40f845faa65 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/index.js @@ -0,0 +1,43 @@ +'use strict'; + +if (!process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = nextTick; +} else { + module.exports = process.nextTick; +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + 
var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md new file mode 100644 index 00000000000000..c67e3532b54245 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json new file mode 100644 index 00000000000000..f5d106537ad8dc --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/package.json @@ -0,0 +1,95 @@ +{ + "_args": [ + [ + { + "raw": "process-nextick-args@~1.0.6", + "scope": null, + "escapedName": "process-nextick-args", + "name": "process-nextick-args", + "rawSpec": "~1.0.6", + "spec": ">=1.0.6 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" + ], + [ + { + "raw": "process-nextick-args@~1.0.6", + "scope": null, + "escapedName": "process-nextick-args", + "name": "process-nextick-args", + "rawSpec": "~1.0.6", + "spec": ">=1.0.6 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" + ] + ], + "_from": "process-nextick-args@~1.0.6", + "_id": "process-nextick-args@1.0.7", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream/process-nextick-args", + "_nodeVersion": "5.11.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776" + }, + "_npmUser": { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + }, + "_npmVersion": "3.8.6", + "_phantomChildren": {}, + "_requested": { + "raw": "process-nextick-args@~1.0.6", + "scope": null, + "escapedName": "process-nextick-args", + "name": "process-nextick-args", + "rawSpec": "~1.0.6", + "spec": ">=1.0.6 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", + "_shrinkwrap": null, + "_spec": "process-nextick-args@~1.0.6", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", + "author": "", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "dependencies": {}, + "description": "process.nextTick but always with args", + "devDependencies": { + "tap": "~0.2.6" + }, + "directories": {}, + "dist": { + "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3", + "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz" + }, + "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39", + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + } + ], + "name": "process-nextick-args", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "scripts": { + "test": "node test.js" + }, + "version": 
"1.0.7" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md new file mode 100644 index 00000000000000..78e7cfaeb7acde --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var nextTick = require('process-nextick-args'); + +nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js new file mode 100644 index 00000000000000..ef15721584ac99 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/process-nextick-args/test.js @@ -0,0 +1,24 @@ +var test = require("tap").test; +var nextTick = require('./'); + +test('should work', function (t) { + t.plan(5); + nextTick(function (a) { + t.ok(a); + nextTick(function (thing) { + t.equals(thing, 7); + }, 7); + }, true); + nextTick(function (a, b, c) { + t.equals(a, 'step'); + t.equals(b, 3); + t.equals(c, 'profit'); + }, 'step', 3, 'profit'); +}); + +test('correct number of arguments', function (t) { + t.plan(1); + nextTick(function () { + t.equals(2, arguments.length, 'correct number'); + }, 1, 2); +}); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore new file mode 100644 index 00000000000000..206320cc1d21b9 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore @@ -0,0 +1,2 @@ +build +test diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE new file mode 100644 index 00000000000000..6de584a48f5c89 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE @@ -0,0 +1,20 @@ +Copyright Joyent, Inc. and other Node contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md new file mode 100644 index 00000000000000..4d2aa001501107 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md @@ -0,0 +1,7 @@ +**string_decoder.js** (`require('string_decoder')`) from Node.js core + +Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details. + +Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.** + +The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version. \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js new file mode 100644 index 00000000000000..b00e54fb790982 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js @@ -0,0 +1,221 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var Buffer = require('buffer').Buffer; + +var isBufferEncoding = Buffer.isEncoding + || function(encoding) { + switch (encoding && encoding.toLowerCase()) { + case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; + default: return false; + } + } + + +function assertEncoding(encoding) { + if (encoding && !isBufferEncoding(encoding)) { + throw new Error('Unknown encoding: ' + encoding); + } +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. CESU-8 is handled as part of the UTF-8 encoding. +// +// @TODO Handling all encodings inside a single object makes it very difficult +// to reason about this code, so it should be split up in the future. +// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code +// points as used by CESU-8. +var StringDecoder = exports.StringDecoder = function(encoding) { + this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); + assertEncoding(encoding); + switch (this.encoding) { + case 'utf8': + // CESU-8 represents each of Surrogate Pair by 3-bytes + this.surrogateSize = 3; + break; + case 'ucs2': + case 'utf16le': + // UTF-16 represents each of Surrogate Pair by 2-bytes + this.surrogateSize = 2; + this.detectIncompleteChar = utf16DetectIncompleteChar; + break; + case 'base64': + // Base-64 stores 3 bytes in 4 chars, and pads the remainder. + this.surrogateSize = 3; + this.detectIncompleteChar = base64DetectIncompleteChar; + break; + default: + this.write = passThroughWrite; + return; + } + + // Enough space to store all bytes of a single character. UTF-8 needs 4 + // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). + this.charBuffer = new Buffer(6); + // Number of bytes received for the current incomplete multi-byte character. + this.charReceived = 0; + // Number of bytes expected for the current incomplete multi-byte character. + this.charLength = 0; +}; + + +// write decodes the given buffer and returns it as JS string that is +// guaranteed to not contain any partial multi-byte characters. Any partial +// character found at the end of the buffer is buffered up, and will be +// returned when calling write again with the remaining bytes. +// +// Note: Converting a Buffer containing an orphan surrogate to a String +// currently works, but converting a String to a Buffer (via `new Buffer`, or +// Buffer#write) will replace incomplete surrogates with the unicode +// replacement character. See https://codereview.chromium.org/121173009/ . +StringDecoder.prototype.write = function(buffer) { + var charStr = ''; + // if our last write ended with an incomplete multibyte character + while (this.charLength) { + // determine how many remaining bytes this buffer has to offer for this char + var available = (buffer.length >= this.charLength - this.charReceived) ? 
+ this.charLength - this.charReceived : + buffer.length; + + // add the new bytes to the char buffer + buffer.copy(this.charBuffer, this.charReceived, 0, available); + this.charReceived += available; + + if (this.charReceived < this.charLength) { + // still not enough chars in this buffer? wait for more ... + return ''; + } + + // remove bytes belonging to the current character from the buffer + buffer = buffer.slice(available, buffer.length); + + // get the character that was split + charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); + + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + var charCode = charStr.charCodeAt(charStr.length - 1); + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + this.charLength += this.surrogateSize; + charStr = ''; + continue; + } + this.charReceived = this.charLength = 0; + + // if there are no more bytes in this buffer, just emit our char + if (buffer.length === 0) { + return charStr; + } + break; + } + + // determine and set charLength / charReceived + this.detectIncompleteChar(buffer); + + var end = buffer.length; + if (this.charLength) { + // buffer the incomplete character bytes we got + buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); + end -= this.charReceived; + } + + charStr += buffer.toString(this.encoding, 0, end); + + var end = charStr.length - 1; + var charCode = charStr.charCodeAt(end); + // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character + if (charCode >= 0xD800 && charCode <= 0xDBFF) { + var size = this.surrogateSize; + this.charLength += size; + this.charReceived += size; + this.charBuffer.copy(this.charBuffer, size, 0, size); + buffer.copy(this.charBuffer, 0, 0, size); + return charStr.substring(0, end); + } + + // or just emit the charStr + return charStr; +}; + +// detectIncompleteChar determines if there is an incomplete UTF-8 character at +// the end of the given buffer. If so, it sets this.charLength to the byte +// length that character, and sets this.charReceived to the number of bytes +// that are available for this character. +StringDecoder.prototype.detectIncompleteChar = function(buffer) { + // determine how many bytes we have to check at the end of this buffer + var i = (buffer.length >= 3) ? 3 : buffer.length; + + // Figure out if one of the last i bytes of our buffer announces an + // incomplete char. + for (; i > 0; i--) { + var c = buffer[buffer.length - i]; + + // See http://en.wikipedia.org/wiki/UTF-8#Description + + // 110XXXXX + if (i == 1 && c >> 5 == 0x06) { + this.charLength = 2; + break; + } + + // 1110XXXX + if (i <= 2 && c >> 4 == 0x0E) { + this.charLength = 3; + break; + } + + // 11110XXX + if (i <= 3 && c >> 3 == 0x1E) { + this.charLength = 4; + break; + } + } + this.charReceived = i; +}; + +StringDecoder.prototype.end = function(buffer) { + var res = ''; + if (buffer && buffer.length) + res = this.write(buffer); + + if (this.charReceived) { + var cr = this.charReceived; + var buf = this.charBuffer; + var enc = this.encoding; + res += buf.slice(0, cr).toString(enc); + } + + return res; +}; + +function passThroughWrite(buffer) { + return buffer.toString(this.encoding); +} + +function utf16DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 2; + this.charLength = this.charReceived ? 2 : 0; +} + +function base64DetectIncompleteChar(buffer) { + this.charReceived = buffer.length % 3; + this.charLength = this.charReceived ? 
3 : 0; +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json new file mode 100644 index 00000000000000..36fa27f82b498b --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json @@ -0,0 +1,99 @@ +{ + "_args": [ + [ + { + "raw": "string_decoder@~0.10.x", + "scope": null, + "escapedName": "string_decoder", + "name": "string_decoder", + "rawSpec": "~0.10.x", + "spec": ">=0.10.0 <0.11.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" + ], + [ + { + "raw": "string_decoder@~0.10.x", + "scope": null, + "escapedName": "string_decoder", + "name": "string_decoder", + "rawSpec": "~0.10.x", + "spec": ">=0.10.0 <0.11.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" + ] + ], + "_from": "string_decoder@~0.10.x", + "_id": "string_decoder@0.10.31", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream/string_decoder", + "_npmUser": { + "name": "rvagg", + "email": "rod@vagg.org" + }, + "_npmVersion": "1.4.23", + "_phantomChildren": {}, + "_requested": { + "raw": "string_decoder@~0.10.x", + "scope": null, + "escapedName": "string_decoder", + "name": "string_decoder", + "rawSpec": "~0.10.x", + "spec": ">=0.10.0 <0.11.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", + "_shrinkwrap": null, + "_spec": "string_decoder@~0.10.x", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/rvagg/string_decoder/issues" + }, + "dependencies": {}, + "description": "The string_decoder module from Node core", + "devDependencies": { + "tap": "~0.4.8" + }, + "directories": {}, + "dist": { + "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94", + "tarball": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + }, + "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0", + "homepage": "https://github.com/rvagg/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "index.js", + "maintainers": [ + { + "name": "substack", + "email": "mail@substack.net" + }, + { + "name": "rvagg", + "email": "rod@vagg.org" + } + ], + "name": "string_decoder", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/rvagg/string_decoder.git" + }, + "scripts": { + "test": "tap test/simple/*.js" + }, + "version": "0.10.31" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md new file mode 100644 index 00000000000000..acc8675372e980 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + 
+1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE new file mode 100644 index 00000000000000..6a60e8c225c9ba --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md new file mode 100644 index 00000000000000..75622fa7c250a6 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. 
+ +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js new file mode 100644 index 00000000000000..549ae2f065ea5a --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. 
+ * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js new file mode 100644 index 00000000000000..5e6fcff5ddd3fb --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json new file mode 100644 index 00000000000000..44061da89bcdfb --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/node_modules/util-deprecate/package.json @@ -0,0 +1,101 @@ +{ + "_args": [ + [ + { + "raw": "util-deprecate@~1.0.1", + "scope": null, + "escapedName": "util-deprecate", + "name": "util-deprecate", + "rawSpec": "~1.0.1", + "spec": ">=1.0.1 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream/node_modules/readable-stream" + ], + [ + { + "raw": "util-deprecate@~1.0.1", + "scope": null, + "escapedName": "util-deprecate", + "name": "util-deprecate", + "rawSpec": "~1.0.1", + "spec": ">=1.0.1 <1.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream" + ] + ], + "_from": "util-deprecate@~1.0.1", + "_id": "util-deprecate@1.0.2", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream/util-deprecate", + "_nodeVersion": "4.1.2", + "_npmUser": { + "name": "tootallnate", + "email": "nathan@tootallnate.net" + }, + "_npmVersion": "2.14.4", + "_phantomChildren": {}, + "_requested": { + "raw": "util-deprecate@~1.0.1", + "scope": null, + "escapedName": "util-deprecate", + "name": "util-deprecate", + "rawSpec": "~1.0.1", + "spec": ">=1.0.1 <1.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "_shrinkwrap": null, + "_spec": "util-deprecate@~1.0.1", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream", + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io/" + }, + "browser": "browser.js", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "dependencies": {}, + "description": "The Node.js `util.deprecate()` function with browser support", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "tarball": 
"https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" + }, + "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4", + "homepage": "https://github.com/TooTallNate/util-deprecate", + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "license": "MIT", + "main": "node.js", + "maintainers": [ + { + "name": "tootallnate", + "email": "nathan@tootallnate.net" + } + ], + "name": "util-deprecate", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.2" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json new file mode 100644 index 00000000000000..ae39f5f97d50ee --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/package.json @@ -0,0 +1,125 @@ +{ + "_args": [ + [ + { + "raw": "readable-stream@~2.0.0", + "scope": null, + "escapedName": "readable-stream", + "name": "readable-stream", + "rawSpec": "~2.0.0", + "spec": ">=2.0.0 <2.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/concat-stream" + ], + [ + { + "raw": "readable-stream@~2.0.0", + "scope": null, + "escapedName": "readable-stream", + "name": "readable-stream", + "rawSpec": "~2.0.0", + "spec": ">=2.0.0 <2.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2" + ] + ], + "_from": "readable-stream@~2.0.0", + "_id": "readable-stream@2.0.6", + "_inCache": true, + "_location": "/mississippi/through2/readable-stream", + "_nodeVersion": "5.7.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445" + }, + "_npmUser": { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + }, + "_npmVersion": "3.6.0", + "_phantomChildren": {}, + "_requested": { + "raw": "readable-stream@~2.0.0", + "scope": null, + "escapedName": "readable-stream", + "name": "readable-stream", + "rawSpec": "~2.0.0", + "spec": ">=2.0.0 <2.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", + "_shrinkwrap": null, + "_spec": "readable-stream@~2.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2", + "browser": { + "util": false + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + }, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "tap": "~0.2.6", + "tape": "~4.5.1", + "zuul": "~3.9.0" + }, + "directories": {}, + "dist": { + "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e", + "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz" + }, + "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e", + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + 
"stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "maintainers": [ + { + "name": "isaacs", + "email": "isaacs@npmjs.com" + }, + { + "name": "tootallnate", + "email": "nathan@tootallnate.net" + }, + { + "name": "rvagg", + "email": "rod@vagg.org" + }, + { + "name": "cwmma", + "email": "calvin.metcalf@gmail.com" + } + ], + "name": "readable-stream", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "browser": "npm run write-zuul && zuul -- test/browser.js", + "test": "tap test/parallel/*.js test/ours/*.js", + "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" + }, + "version": "2.0.6" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js new file mode 100644 index 00000000000000..27e8d8a55165f9 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_passthrough.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js new file mode 100644 index 00000000000000..6222a579864dd2 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/readable.js @@ -0,0 +1,12 @@ +var Stream = (function (){ + try { + return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify + } catch(_){} +}()); +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = Stream || exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js new file mode 100644 index 00000000000000..5d482f0780e993 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_transform.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js new file mode 100644 index 00000000000000..e1e9efdf3c12e9 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/readable-stream/writable.js @@ -0,0 +1 @@ +module.exports = require("./lib/_stream_writable.js") diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/.npmignore b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/.npmignore new file mode 100644 index 00000000000000..3c3629e647f5dd --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/.npmignore @@ -0,0 +1 @@ +node_modules diff --git 
a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/LICENCE b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/LICENCE new file mode 100644 index 00000000000000..1a14b437e87a8f --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/LICENCE @@ -0,0 +1,19 @@ +Copyright (c) 2012-2014 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/Makefile b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/Makefile new file mode 100644 index 00000000000000..d583fcf49dc1a3 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/Makefile @@ -0,0 +1,4 @@ +browser: + node ./support/compile + +.PHONY: browser \ No newline at end of file diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/README.md b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/README.md new file mode 100644 index 00000000000000..093cb2978e4af0 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. 
Does not mutate arguments +var combination = extend({ + a: "a", + b: 'c' +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licenced + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/immutable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/immutable.js new file mode 100644 index 00000000000000..94889c9de11a18 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/mutable.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/mutable.js new file mode 100644 index 00000000000000..72debede6ca585 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/package.json new file mode 100644 index 00000000000000..36ff9dfc55ea2d --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/package.json @@ -0,0 +1,117 @@ +{ + "_args": [ + [ + { + "raw": "xtend@~4.0.0", + "scope": null, + "escapedName": "xtend", + "name": "xtend", + "rawSpec": "~4.0.0", + "spec": ">=4.0.0 <4.1.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2" + ] + ], + "_from": "xtend@>=4.0.0 <4.1.0", + "_id": "xtend@4.0.1", + "_inCache": true, + "_location": "/mississippi/through2/xtend", + "_nodeVersion": "0.10.32", + "_npmUser": { + "name": "raynos", + "email": "raynos2@gmail.com" + }, + "_npmVersion": "2.14.1", + "_phantomChildren": {}, + "_requested": { + "raw": "xtend@~4.0.0", + "scope": null, + "escapedName": "xtend", + "name": "xtend", + "rawSpec": "~4.0.0", + "spec": ">=4.0.0 <4.1.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi/through2" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", + "_shasum": "a5c6d532be656e23db820efb943a1f04998d63af", + "_shrinkwrap": null, + "_spec": "xtend@~4.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi/node_modules/through2", + "author": { + "name": "Raynos", + "email": "raynos2@gmail.com" + }, + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "directories": {}, + "dist": { + "shasum": "a5c6d532be656e23db820efb943a1f04998d63af", + "tarball": 
"https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" + }, + "engines": { + "node": ">=0.4" + }, + "gitHead": "23dc302a89756da89c1897bc732a752317e35390", + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "license": "MIT", + "main": "immutable", + "maintainers": [ + { + "name": "raynos", + "email": "raynos2@gmail.com" + } + ], + "name": "xtend", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "version": "4.0.1" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/test.js b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/test.js new file mode 100644 index 00000000000000..093a2b061e81ae --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/node_modules/xtend/test.js @@ -0,0 +1,83 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/package.json b/deps/npm/node_modules/mississippi/node_modules/through2/package.json new file mode 100644 index 00000000000000..c7413b9f91c5df --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/package.json @@ -0,0 +1,106 @@ +{ + "_args": [ + [ + { + "raw": "through2@^2.0.0", + "scope": null, + "escapedName": "through2", + "name": "through2", + "rawSpec": "^2.0.0", + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + 
"/Users/zkat/Documents/code/npm/node_modules/mississippi" + ] + ], + "_from": "through2@>=2.0.0 <3.0.0", + "_id": "through2@2.0.1", + "_inCache": true, + "_location": "/mississippi/through2", + "_nodeVersion": "5.5.0", + "_npmOperationalInternal": { + "host": "packages-6-west.internal.npmjs.com", + "tmp": "tmp/through2-2.0.1.tgz_1454928418348_0.7339043114334345" + }, + "_npmUser": { + "name": "rvagg", + "email": "rod@vagg.org" + }, + "_npmVersion": "3.6.0", + "_phantomChildren": { + "inherits": "2.0.3" + }, + "_requested": { + "raw": "through2@^2.0.0", + "scope": null, + "escapedName": "through2", + "name": "through2", + "rawSpec": "^2.0.0", + "spec": ">=2.0.0 <3.0.0", + "type": "range" + }, + "_requiredBy": [ + "/mississippi" + ], + "_resolved": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz", + "_shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", + "_shrinkwrap": null, + "_spec": "through2@^2.0.0", + "_where": "/Users/zkat/Documents/code/npm/node_modules/mississippi", + "author": { + "name": "Rod Vagg", + "email": "r@va.gg", + "url": "https://github.com/rvagg" + }, + "bugs": { + "url": "https://github.com/rvagg/through2/issues" + }, + "dependencies": { + "readable-stream": "~2.0.0", + "xtend": "~4.0.0" + }, + "description": "A tiny wrapper around Node streams2 Transform to avoid explicit subclassing noise", + "devDependencies": { + "bl": "~0.9.4", + "faucet": "0.0.1", + "stream-spigot": "~3.0.5", + "tape": "~4.0.0" + }, + "directories": {}, + "dist": { + "shasum": "384e75314d49f32de12eebb8136b8eb6b5d59da9", + "tarball": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz" + }, + "gitHead": "6d52a1b77db13a741f2708cd5854a198e4ae3072", + "homepage": "https://github.com/rvagg/through2#readme", + "keywords": [ + "stream", + "streams2", + "through", + "transform" + ], + "license": "MIT", + "main": "through2.js", + "maintainers": [ + { + "name": "rvagg", + "email": "rod@vagg.org" + }, + { + "name": "bryce", + "email": "bryce@ravenwall.com" + } + ], + "name": "through2", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + "type": "git", + "url": "git+https://github.com/rvagg/through2.git" + }, + "scripts": { + "test": "node test/test.js | faucet", + "test-local": "brtapsauce-local test/basic-test.js" + }, + "version": "2.0.1" +} diff --git a/deps/npm/node_modules/mississippi/node_modules/through2/through2.js b/deps/npm/node_modules/mississippi/node_modules/through2/through2.js new file mode 100644 index 00000000000000..ef13980d7b9dd3 --- /dev/null +++ b/deps/npm/node_modules/mississippi/node_modules/through2/through2.js @@ -0,0 +1,96 @@ +var Transform = require('readable-stream/transform') + , inherits = require('util').inherits + , xtend = require('xtend') + +function DestroyableTransform(opts) { + Transform.call(this, opts) + this._destroyed = false +} + +inherits(DestroyableTransform, Transform) + +DestroyableTransform.prototype.destroy = function(err) { + if (this._destroyed) return + this._destroyed = true + + var self = this + process.nextTick(function() { + if (err) + self.emit('error', err) + self.emit('close') + }) +} + +// a noop _transform function +function noop (chunk, enc, callback) { + callback(null, chunk) +} + + +// create a new export function, used by both the main export and +// the .ctor export, contains common logic for dealing with arguments +function through2 (construct) { + return function (options, transform, flush) { + if (typeof options == 'function') { + flush = transform + transform = options + options = 
{} + } + + if (typeof transform != 'function') + transform = noop + + if (typeof flush != 'function') + flush = null + + return construct(options, transform, flush) + } +} + + +// main export, just make me a transform stream! +module.exports = through2(function (options, transform, flush) { + var t2 = new DestroyableTransform(options) + + t2._transform = transform + + if (flush) + t2._flush = flush + + return t2 +}) + + +// make me a reusable prototype that I can `new`, or implicitly `new` +// with a constructor call +module.exports.ctor = through2(function (options, transform, flush) { + function Through2 (override) { + if (!(this instanceof Through2)) + return new Through2(override) + + this.options = xtend(options, override) + + DestroyableTransform.call(this, this.options) + } + + inherits(Through2, DestroyableTransform) + + Through2.prototype._transform = transform + + if (flush) + Through2.prototype._flush = flush + + return Through2 +}) + + +module.exports.obj = through2(function (options, transform, flush) { + var t2 = new DestroyableTransform(xtend({ objectMode: true, highWaterMark: 16 }, options)) + + t2._transform = transform + + if (flush) + t2._flush = flush + + return t2 +}) diff --git a/deps/npm/node_modules/mississippi/package.json b/deps/npm/node_modules/mississippi/package.json new file mode 100644 index 00000000000000..3595472180efc4 --- /dev/null +++ b/deps/npm/node_modules/mississippi/package.json @@ -0,0 +1,95 @@ +{ + "_args": [ + [ + { + "raw": "mississippi@~1.2.0", + "scope": null, + "escapedName": "mississippi", + "name": "mississippi", + "rawSpec": "~1.2.0", + "spec": ">=1.2.0 <1.3.0", + "type": "range" + }, + "/Users/zkat/Documents/code/npm" + ] + ], + "_from": "mississippi@>=1.2.0 <1.3.0", + "_id": "mississippi@1.2.0", + "_inCache": true, + "_location": "/mississippi", + "_nodeVersion": "4.2.3", + "_npmUser": { + "name": "maxogden", + "email": "max@maxogden.com" + }, + "_npmVersion": "2.14.15", + "_phantomChildren": { + "inherits": "2.0.3", + "once": "1.4.0", + "readable-stream": "2.1.5", + "wrappy": "1.0.2" + }, + "_requested": { + "raw": "mississippi@~1.2.0", + "scope": null, + "escapedName": "mississippi", + "name": "mississippi", + "rawSpec": "~1.2.0", + "spec": ">=1.2.0 <1.3.0", + "type": "range" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz", + "_shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", + "_shrinkwrap": null, + "_spec": "mississippi@~1.2.0", + "_where": "/Users/zkat/Documents/code/npm", + "author": { + "name": "max ogden" + }, + "bugs": { + "url": "https://github.com/maxogden/mississippi/issues" + }, + "dependencies": { + "concat-stream": "^1.5.0", + "duplexify": "^3.4.2", + "end-of-stream": "^1.1.0", + "flush-write-stream": "^1.0.0", + "from2": "^2.1.0", + "pump": "^1.0.0", + "pumpify": "^1.3.3", + "stream-each": "^1.1.0", + "through2": "^2.0.0" + }, + "description": "a collection of useful streams", + "devDependencies": {}, + "directories": {}, + "dist": { + "shasum": "cd51bb9bbad3ddb13dee3cf60f1d0929c7a7fa4c", + "tarball": "https://registry.npmjs.org/mississippi/-/mississippi-1.2.0.tgz" + }, + "gitHead": "4aab2a2d4d98fd5e300a329048eb02a12df44c60", + "homepage": "https://github.com/maxogden/mississippi#readme", + "license": "BSD-2-Clause", + "main": "index.js", + "maintainers": [ + { + "name": "maxogden", + "email": "max@maxogden.com" + } + ], + "name": "mississippi", + "optionalDependencies": {}, + "readme": "ERROR: No README data found!", + "repository": { + 
"type": "git", + "url": "git+https://github.com/maxogden/mississippi.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.2.0" +} diff --git a/deps/npm/node_modules/mississippi/readme.md b/deps/npm/node_modules/mississippi/readme.md new file mode 100644 index 00000000000000..9013bb0dc53188 --- /dev/null +++ b/deps/npm/node_modules/mississippi/readme.md @@ -0,0 +1,352 @@ +# mississippi + +a collection of useful stream utility modules. learn how the modules work using this and then pick the ones you want and use them individually + +the goal of the modules included in mississippi is to make working with streams easy without sacrificing speed, error handling or composability. + +## usage + +```js +var miss = require('mississippi') +``` + +## methods + +- [pipe](#pipe) +- [each](#each) +- [pipeline](#pipeline) +- [duplex](#duplex) +- [through](#through) +- [from](#from) +- [to](#to) +- [concat](#concat) +- [finished](#finished) + +### pipe + +##### `miss.pipe(stream1, stream2, stream3, ..., cb)` + +Pipes streams together and destroys all of them if one of them closes. Calls `cb` with `(error)` if there was an error in any of the streams. + +When using standard `source.pipe(destination)` the source will _not_ be destroyed if the destination emits close or error. You are also not able to provide a callback to tell when then pipe has finished. + +`miss.pipe` does these two things for you, ensuring you handle stream errors 100% of the time (unhandled errors are probably the most common bug in most node streams code) + +#### original module + +`miss.pipe` is provided by [`require('pump')`](https://npmjs.org/pump) + +#### example + +```js +// lets do a simple file copy +var fs = require('fs') + +var read = fs.createReadStream('./original.zip') +var write = fs.createWriteStream('./copy.zip') + +// use miss.pipe instead of read.pipe(write) +miss.pipe(read, write, function (err) { + if (err) return console.error('Copy error!', err) + console.log('Copied successfully') +}) +``` + +### each + +##### `miss.each(stream, each, [done])` + +Iterate the data in `stream` one chunk at a time. Your `each` function will be called with with `(data, next)` where data is a data chunk and next is a callback. Call `next` when you are ready to consume the next chunk. + +Optionally you can call `next` with an error to destroy the stream. You can also pass the optional third argument, `done`, which is a function that will be called with `(err)` when the stream ends. The `err` argument will be populated with an error if the stream emitted an error. + +#### original module + +`miss.each` is provided by [`require('stream-each')`](https://npmjs.org/stream-each) + +#### example + +```js +var fs = require('fs') +var split = require('split2') + +var newLineSeparatedNumbers = fs.createReadStream('numbers.txt') + +var pipeline = miss.pipeline(newLineSeparatedNumbers, split()) +var each = miss.each(pipeline, eachLine, done) +var sum = 0 + +function eachLine (line, next) { + sum += parseInt(line.toString()) + next() +} + +function done (err) { + if (err) throw err + console.log('sum is', sum) +} +``` + +### pipeline + +##### `var pipeline = miss.pipeline(stream1, stream2, stream3, ...)` + +Builds a pipeline from all the transform streams passed in as arguments by piping them together and returning a single stream object that lets you write to the first stream and read from the last stream. 
+
+If any of the streams in the pipeline emits an error or gets destroyed, or you destroy the stream it returns, all of the streams will be destroyed and cleaned up for you.
+
+#### original module
+
+`miss.pipeline` is provided by [`require('pumpify')`](https://npmjs.org/pumpify)
+
+#### example
+
+```js
+// first create some transform streams (note: these two modules are fictional)
+var imageResize = require('image-resizer-stream')({width: 400})
+var pngOptimizer = require('png-optimizer-stream')({quality: 60})
+
+// instead of doing a.pipe(b), use pipeline
+var resizeAndOptimize = miss.pipeline(imageResize, pngOptimizer)
+// `resizeAndOptimize` is a transform stream. when you write to it, it writes
+// to `imageResize`. when you read from it, it reads from `pngOptimizer`.
+// it handles piping all the streams together for you
+
+// use it like any other transform stream
+var fs = require('fs')
+
+var read = fs.createReadStream('./image.png')
+var write = fs.createWriteStream('./resized-and-optimized.png')
+
+miss.pipe(read, resizeAndOptimize, write, function (err) {
+  if (err) return console.error('Image processing error!', err)
+  console.log('Image processed successfully')
+})
+```
+
+### duplex
+
+##### `var duplex = miss.duplex([writable, readable, opts])`
+
+Take two separate streams, a writable and a readable, and turn them into a single [duplex (readable and writable) stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex).
+
+The returned stream will emit data from the readable. When you write to it, it writes to the writable.
+
+You can either choose to supply the writable and the readable at the time you create the stream, or you can do it later using the `.setWritable` and `.setReadable` methods and data written to the stream in the meantime will be buffered for you.
+
+#### original module
+
+`miss.duplex` is provided by [`require('duplexify')`](https://npmjs.org/duplexify)
+
+#### example
+
+```js
+// lets spawn a process and take its stdout and stdin and combine them into 1 stream
+var child = require('child_process')
+
+// @- tells it to read from stdin, --data-binary sets 'raw' binary mode
+var curl = child.spawn('curl', ['-X', 'POST', '--data-binary', '@-', 'http://foo.com'])
+
+// duplexCurl will write to stdin and read from stdout
+var duplexCurl = miss.duplex(curl.stdin, curl.stdout)
+```
+
+### through
+
+##### `var transformer = miss.through([options, transformFunction, flushFunction])`
+
+Make a custom [transform stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform).
+
+The `options` object is passed to the internal transform stream and can be used to create an `objectMode` stream (or use the shortcut `miss.through.obj([...])`)
+
+The `transformFunction` is called when data is available for the writable side and has the signature `(chunk, encoding, cb)`. Within the function, add data to the readable side any number of times with `this.push(data)`. Call `cb()` to indicate processing of the `chunk` is complete. Or to easily emit a single error or chunk, call `cb(err, chunk)`
+
+The `flushFunction`, with signature `(cb)`, is called just before the stream is complete and should be used to wrap up stream processing.
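As a supplementary sketch (not part of the upstream readme), the object-mode shortcut mentioned above lets the transform deal in plain JavaScript values instead of buffers:

```js
var miss = require('mississippi')

// a tiny object-mode transform that doubles every number written to it
var doubler = miss.through.obj(function (num, enc, cb) {
  cb(null, num * 2)
})

doubler.on('data', function (n) {
  console.log(n)
})

doubler.write(2)  // prints 4
doubler.write(21) // prints 42
doubler.end()
```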
+
+#### original module
+
+`miss.through` is provided by [`require('through2')`](https://npmjs.org/through2)
+
+#### example
+
+```js
+var fs = require('fs')
+
+var read = fs.createReadStream('./boring_lowercase.txt')
+var write = fs.createWriteStream('./AWESOMECASE.TXT')
+
+// Leaving out the options object
+var uppercaser = miss.through(
+  function (chunk, enc, cb) {
+    cb(null, chunk.toString().toUpperCase())
+  },
+  function (cb) {
+    this.push('ONE LAST BIT OF UPPERCASE')
+    cb()
+  }
+)
+
+miss.pipe(read, uppercaser, write, function (err) {
+  if (err) return console.error('Trouble uppercasing!')
+  console.log('Splendid uppercasing!')
+})
+```
+
+### from
+
+##### `miss.from([opts], read)`
+
+Make a custom [readable stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_readable).
+
+`opts` contains the options to pass on to the ReadableStream constructor e.g. for creating a readable object stream (or use the shortcut `miss.from.obj([...])`).
+
+Returns a readable stream that calls `read(size, next)` when data is requested from the stream.
+
+- `size` is the recommended amount of data (in bytes) to retrieve.
+- `next(err, chunk)` should be called when you're ready to emit more data.
+
+#### original module
+
+`miss.from` is provided by [`require('from2')`](https://npmjs.org/from2)
+
+#### example
+
+```js
+function fromString(string) {
+  return miss.from(function(size, next) {
+    // if there's no more content
+    // left in the string, close the stream.
+    if (string.length <= 0) return next(null, null)
+
+    // Pull in a new chunk of text,
+    // removing it from the string.
+    var chunk = string.slice(0, size)
+    string = string.slice(size)
+
+    // Emit "chunk" from the stream.
+    next(null, chunk)
+  })
+}
+
+// pipe "hello world" out
+// to stdout.
+fromString('hello world').pipe(process.stdout)
```
+
+### to
+
+##### `miss.to([options], write, [flush])`
+
+Make a custom [writable stream](https://nodejs.org/docs/latest/api/stream.html#stream_class_stream_writable).
+
+`opts` contains the options to pass on to the WritableStream constructor e.g. for creating a writable object stream (or use the shortcut `miss.to.obj([...])`).
+
+Returns a writable stream that calls `write(data, enc, cb)` when data is written to the stream.
+
+- `data` is the received data to write to the destination.
+- `enc` is the encoding of the piece of data received.
+- `cb(err)` should be called when you're ready to write more data, or with an error if one was encountered.
+
+`flush(cb)` is called before `finish` is emitted and allows for cleanup steps to occur.
+
+#### original module
+
+`miss.to` is provided by [`require('flush-write-stream')`](https://npmjs.org/flush-write-stream)
+
+#### example
+
+```js
+var ws = miss.to(write, flush)
+
+ws.on('finish', function () {
+  console.log('finished')
+})
+
+ws.write('hello')
+ws.write('world')
+ws.end()
+
+function write (data, enc, cb) {
+  // i am your normal ._write method
+  console.log('writing', data.toString())
+  cb()
+}
+
+function flush (cb) {
+  // i am called before finish is emitted
+  setTimeout(cb, 1000) // wait 1 sec
+}
+```
+
+If you run the above it will produce the following output
+
+```
+writing hello
+writing world
+(nothing happens for 1 sec)
+finished
+```
+
+### concat
+
+##### `var concat = miss.concat(cb)`
+
+Returns a writable stream that concatenates all data written to the stream and calls a callback with the single result.
+
+Calling `miss.concat(cb)` returns a writable stream. `cb` is called when the writable stream is finished, e.g.
when all data is done being written to it. `cb` is called with a single argument, `(data)`, which will contain the result of concatenating all the data written to the stream.
+
+Note that `miss.concat` will not handle stream errors for you. To handle errors, use `miss.pipe` or handle the `error` event manually.
+
+#### original module
+
+`miss.concat` is provided by [`require('concat-stream')`](https://npmjs.org/concat-stream)
+
+#### example
+
+```js
+var fs = require('fs')
+var concat = require('concat-stream')
+
+var readStream = fs.createReadStream('cat.png')
+var concatStream = concat(gotPicture)
+
+readStream.on('error', handleError)
+readStream.pipe(concatStream)
+
+function gotPicture(imageBuffer) {
+  // imageBuffer is all of `cat.png` as a node.js Buffer
+}
+
+function handleError(err) {
+  // handle your error appropriately here, e.g.:
+  console.error(err) // print the error to STDERR
+  process.exit(1) // exit program with non-zero exit code
+}
+```
+
+### finished
+
+##### `miss.finished(stream, cb)`
+
+Waits for `stream` to finish or error and then calls `cb` with `(err)`. `cb` will only be called once. `err` will be null if the stream finished without error, or else it will be populated with the error from the stream's `error` event.
+
+This function is useful for simplifying stream handling code as it lets you handle success or error conditions in a single code path. It's used internally by `miss.pipe`.
+
+#### original module
+
+`miss.finished` is provided by [`require('end-of-stream')`](https://npmjs.org/end-of-stream)
+
+#### example
+
+```js
+var fs = require('fs')
+
+var copySource = fs.createReadStream('./movie.mp4')
+var copyDest = fs.createWriteStream('./movie-copy.mp4')
+
+copySource.pipe(copyDest)
+
+miss.finished(copyDest, function(err) {
+  if (err) return console.log('write failed', err)
+  console.log('write success')
+})
+```
diff --git a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md
index c12f8340c8d6c6..e2f70d22503634 100644
--- a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md
+++ b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md
@@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+ +## [3.0.1](https://github.com/tapjs/signal-exit/compare/v3.0.0...v3.0.1) (2016-09-08) + + +### Bug Fixes + +* do not listen on SIGBUS, SIGFPE, SIGSEGV and SIGILL ([#40](https://github.com/tapjs/signal-exit/issues/40)) ([5b105fb](https://github.com/tapjs/signal-exit/commit/5b105fb)) + + + # [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) diff --git a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt index c7e27478a3eff8..eead04a12162dc 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt +++ b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt @@ -1,3 +1,5 @@ +The ISC License + Copyright (c) 2015, Contributors Permission to use, copy, modify, and/or distribute this software diff --git a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json index d43a28af458026..697cc40928ac2d 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json +++ b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json @@ -10,35 +10,23 @@ "spec": ">=3.0.0 <4.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npmlog/node_modules/gauge" - ], - [ - { - "raw": "signal-exit@^3.0.0", - "scope": null, - "escapedName": "signal-exit", - "name": "signal-exit", - "rawSpec": "^3.0.0", - "spec": ">=3.0.0 <4.0.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge" + "/Users/rebecca/code/npm2/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge" ] ], - "_from": "signal-exit@^3.0.0", - "_id": "signal-exit@3.0.0", + "_from": "signal-exit@>=3.0.0 <4.0.0", + "_id": "signal-exit@3.0.1", "_inCache": true, "_location": "/node-gyp/npmlog/gauge/signal-exit", - "_nodeVersion": "5.1.0", + "_nodeVersion": "6.5.0", "_npmOperationalInternal": { "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/signal-exit-3.0.0.tgz_1465857346813_0.7961636525578797" + "tmp": "tmp/signal-exit-3.0.1.tgz_1473354783379_0.4592130535747856" }, "_npmUser": { "name": "bcoe", "email": "ben@npmjs.com" }, - "_npmVersion": "3.3.12", + "_npmVersion": "3.10.3", "_phantomChildren": {}, "_requested": { "raw": "signal-exit@^3.0.0", @@ -52,11 +40,11 @@ "_requiredBy": [ "/node-gyp/npmlog/gauge" ], - "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz", - "_shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", + "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz", + "_shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", "_shrinkwrap": null, "_spec": "signal-exit@^3.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge", + "_where": "/Users/rebecca/code/npm2/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge", "author": { "name": "Ben Coe", "email": "ben@npmjs.com" @@ -68,22 +56,22 @@ "description": "when you want to fire an event no matter how a process exits.", "devDependencies": { "chai": "^3.5.0", - "coveralls": "^2.11.2", - "nyc": "^6.4.4", + "coveralls": 
"^2.11.10", + "nyc": "^8.1.0", "standard": "^7.1.2", "standard-version": "^2.3.0", - "tap": "^5.7.2" + "tap": "^7.1.0" }, "directories": {}, "dist": { - "shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", - "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz" + "shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", + "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz" }, "files": [ "index.js", "signals.js" ], - "gitHead": "2bbec4e5d9f9cf1f7529b1c923d1b058e69ccf7f", + "gitHead": "6859aff54f5198c63fff91baef279b86026bde69", "homepage": "https://github.com/tapjs/signal-exit", "keywords": [ "signal", @@ -114,5 +102,5 @@ "release": "standard-version", "test": "tap --timeout=240 ./test/*.js --cov" }, - "version": "3.0.0" + "version": "3.0.1" } diff --git a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js index bc6f97ee606954..3bd67a8a554e30 100644 --- a/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js +++ b/deps/npm/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js @@ -13,15 +13,16 @@ // fatal signal like SIGWINCH or something, and then // exit, it'll end up firing `process.emit('exit')`, so // the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. module.exports = [ 'SIGABRT', 'SIGALRM', - 'SIGBUS', - 'SIGFPE', 'SIGHUP', - 'SIGILL', 'SIGINT', - 'SIGSEGV', 'SIGTERM' ] diff --git a/deps/npm/node_modules/nopt/test/basic.js b/deps/npm/node_modules/nopt/test/basic.js index d399de9209932c..17fb286951de11 100644 --- a/deps/npm/node_modules/nopt/test/basic.js +++ b/deps/npm/node_modules/nopt/test/basic.js @@ -106,8 +106,6 @@ test("other tests", function (t) { , loglevel : ["silent","win","error","warn","info","verbose","silly"] , long : Boolean , "node-version" : [false, String] - , npaturl : url - , npat : Boolean , "onload-script" : [false, String] , outfd : [Number, Stream] , parseable : Boolean diff --git a/deps/npm/node_modules/npm-registry-client/README.md b/deps/npm/node_modules/npm-registry-client/README.md index 9d81f4df39e0dd..e14159cd07fdb1 100644 --- a/deps/npm/node_modules/npm-registry-client/README.md +++ b/deps/npm/node_modules/npm-registry-client/README.md @@ -318,3 +318,11 @@ any): origin (unique combination of protocol:host:port). Passed to the [httpAgent](https://nodejs.org/api/http.html#http_agent_maxsockets). Default = 50 +* `isFromCI` {Boolean} Identify to severs if this request is coming from CI (for statistics purposes). + Default = detected from environment– primarily this is done by looking for + the CI environment variable to be set to `true`. Also accepted are the + existence of the `JENKINS_URL`, `bamboo.buildKey` and `TDDIUM` environment + variables. +* `scope` {String} The scope of the project this command is being run for. This is the + top level npm module in which a command was run. 
+ Default = none diff --git a/deps/npm/node_modules/npm-registry-client/lib/initialize.js b/deps/npm/node_modules/npm-registry-client/lib/initialize.js index 3c12697f03cba6..174d6470a719ad 100644 --- a/deps/npm/node_modules/npm-registry-client/lib/initialize.js +++ b/deps/npm/node_modules/npm-registry-client/lib/initialize.js @@ -11,6 +11,11 @@ function initialize (uri, method, accept, headers) { this.config.sessionToken = crypto.randomBytes(8).toString('hex') this.log.verbose('request id', this.config.sessionToken) } + if (this.config.isFromCI == null) { + this.config.isFromCI = Boolean( + process.env['CI'] === 'true' || process.env['TDDIUM'] || + process.env['JENKINS_URL'] || process.env['bamboo.buildKey']) + } var opts = { url: uri, @@ -47,7 +52,11 @@ function initialize (uri, method, accept, headers) { if (this.refer) headers.referer = this.refer headers['npm-session'] = this.config.sessionToken + headers['npm-in-ci'] = String(this.config.isFromCI) headers['user-agent'] = this.config.userAgent + if (this.config.scope) { + headers['npm-scope'] = this.config.scope + } return opts } diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md index c12f8340c8d6c6..e2f70d22503634 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [3.0.1](https://github.com/tapjs/signal-exit/compare/v3.0.0...v3.0.1) (2016-09-08) + + +### Bug Fixes + +* do not listen on SIGBUS, SIGFPE, SIGSEGV and SIGILL ([#40](https://github.com/tapjs/signal-exit/issues/40)) ([5b105fb](https://github.com/tapjs/signal-exit/commit/5b105fb)) + + + # [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt index c7e27478a3eff8..eead04a12162dc 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt @@ -1,3 +1,5 @@ +The ISC License + Copyright (c) 2015, Contributors Permission to use, copy, modify, and/or distribute this software diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json index 19e71fd5ff9fde..d4ba8c443059d6 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json @@ -10,7 +10,7 @@ "spec": ">=3.0.0 <4.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npmlog/node_modules/gauge" + "/Users/rebecca/code/npm2/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge" ], [ { @@ -22,23 +22,23 @@ "spec": ">=3.0.0 <4.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge" + "/Users/rebecca/code/npm2/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge" ] ], "_from": "signal-exit@^3.0.0", - "_id": "signal-exit@3.0.0", + "_id": "signal-exit@3.0.1", "_inCache": true, "_location": "/npm-registry-client/npmlog/gauge/signal-exit", - "_nodeVersion": "5.1.0", + "_nodeVersion": "6.5.0", "_npmOperationalInternal": { "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/signal-exit-3.0.0.tgz_1465857346813_0.7961636525578797" + "tmp": "tmp/signal-exit-3.0.1.tgz_1473354783379_0.4592130535747856" }, "_npmUser": { "name": "bcoe", "email": "ben@npmjs.com" }, - "_npmVersion": "3.3.12", + "_npmVersion": "3.10.3", "_phantomChildren": {}, "_requested": { "raw": "signal-exit@^3.0.0", @@ -52,11 +52,11 @@ "_requiredBy": [ "/npm-registry-client/npmlog/gauge" ], - "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz", - "_shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", + "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz", + "_shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", "_shrinkwrap": null, "_spec": "signal-exit@^3.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge", + "_where": "/Users/rebecca/code/npm2/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge", "author": { "name": "Ben Coe", "email": "ben@npmjs.com" @@ -68,22 +68,22 @@ "description": "when you want to fire an event no matter how a process exits.", "devDependencies": { 
"chai": "^3.5.0", - "coveralls": "^2.11.2", - "nyc": "^6.4.4", + "coveralls": "^2.11.10", + "nyc": "^8.1.0", "standard": "^7.1.2", "standard-version": "^2.3.0", - "tap": "^5.7.2" + "tap": "^7.1.0" }, "directories": {}, "dist": { - "shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", - "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz" + "shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", + "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz" }, "files": [ "index.js", "signals.js" ], - "gitHead": "2bbec4e5d9f9cf1f7529b1c923d1b058e69ccf7f", + "gitHead": "6859aff54f5198c63fff91baef279b86026bde69", "homepage": "https://github.com/tapjs/signal-exit", "keywords": [ "signal", @@ -114,5 +114,5 @@ "release": "standard-version", "test": "tap --timeout=240 ./test/*.js --cov" }, - "version": "3.0.0" + "version": "3.0.1" } diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js index bc6f97ee606954..3bd67a8a554e30 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js @@ -13,15 +13,16 @@ // fatal signal like SIGWINCH or something, and then // exit, it'll end up firing `process.emit('exit')`, so // the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. module.exports = [ 'SIGABRT', 'SIGALRM', - 'SIGBUS', - 'SIGFPE', 'SIGHUP', - 'SIGILL', 'SIGINT', - 'SIGSEGV', 'SIGTERM' ] diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json index 362803be0df21e..77e053e2f9549e 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json @@ -10,31 +10,23 @@ "spec": ">=1.0.0 <2.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" - ], - [ - { - "raw": "number-is-nan@^1.0.0", - "scope": null, - "escapedName": "number-is-nan", - "name": "number-is-nan", - "rawSpec": "^1.0.0", - "spec": ">=1.0.0 <2.0.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" + "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" ] ], - "_from": "number-is-nan@^1.0.0", - "_id": "number-is-nan@1.0.0", + "_from": "number-is-nan@>=1.0.0 <2.0.0", + "_id": "number-is-nan@1.0.1", "_inCache": true, "_location": 
"/npm-registry-client/npmlog/gauge/string-width/code-point-at/number-is-nan", - "_nodeVersion": "0.12.3", + "_nodeVersion": "4.5.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/number-is-nan-1.0.1.tgz_1475212313367_0.9480371843092144" + }, "_npmUser": { "name": "sindresorhus", "email": "sindresorhus@gmail.com" }, - "_npmVersion": "2.10.0", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { "raw": "number-is-nan@^1.0.0", @@ -48,11 +40,11 @@ "_requiredBy": [ "/npm-registry-client/npmlog/gauge/string-width/code-point-at" ], - "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz", - "_shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", + "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "_shasum": "097b602b53422a522c1afb8790318336941a011d", "_shrinkwrap": null, "_spec": "number-is-nan@^1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at", + "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -62,14 +54,14 @@ "url": "https://github.com/sindresorhus/number-is-nan/issues" }, "dependencies": {}, - "description": "ES6 Number.isNaN() ponyfill", + "description": "ES2015 Number.isNaN() ponyfill", "devDependencies": { - "ava": "0.0.4" + "ava": "*" }, "directories": {}, "dist": { - "shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", - "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz" + "shasum": "097b602b53422a522c1afb8790318336941a011d", + "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" }, "engines": { "node": ">=0.10.0" @@ -77,13 +69,11 @@ "files": [ "index.js" ], - "gitHead": "0f394b1bc33185c40304363b209e3f0588dbeeb3", + "gitHead": "ed9cdac3f428cc929b61bb230da42c87477af4b9", "homepage": "https://github.com/sindresorhus/number-is-nan#readme", "keywords": [ - "es6", "es2015", "ecmascript", - "harmony", "ponyfill", "polyfill", "shim", @@ -107,7 +97,7 @@ "url": "git+https://github.com/sindresorhus/number-is-nan.git" }, "scripts": { - "test": "node test.js" + "test": "ava" }, - "version": "1.0.0" + "version": "1.0.1" } diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md index 93d851a14f1ac5..24635087120128 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md @@ -1,8 +1,6 @@ # number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) -> ES6 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native 
method +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) [ponyfill](https://ponyfill.com) ## Install diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json index d602d507a66c07..59e664346766b9 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json @@ -10,31 +10,23 @@ "spec": ">=1.0.0 <2.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width" - ], - [ - { - "raw": "code-point-at@^1.0.0", - "scope": null, - "escapedName": "code-point-at", - "name": "code-point-at", - "rawSpec": "^1.0.0", - "spec": ">=1.0.0 <2.0.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width" + "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width" ] ], - "_from": "code-point-at@^1.0.0", - "_id": "code-point-at@1.0.0", + "_from": "code-point-at@>=1.0.0 <2.0.0", + "_id": "code-point-at@1.0.1", "_inCache": true, "_location": "/npm-registry-client/npmlog/gauge/string-width/code-point-at", - "_nodeVersion": "0.12.5", + "_nodeVersion": "4.5.0", + "_npmOperationalInternal": { + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/code-point-at-1.0.1.tgz_1475223183649_0.724906453397125" + }, "_npmUser": { "name": "sindresorhus", "email": "sindresorhus@gmail.com" }, - "_npmVersion": "2.11.2", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { "raw": "code-point-at@^1.0.0", @@ -48,11 +40,11 @@ "_requiredBy": [ "/npm-registry-client/npmlog/gauge/string-width" ], - "_resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.0.0.tgz", - "_shasum": "f69b192d3f7d91e382e4b71bddb77878619ab0c6", + "_resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.0.1.tgz", + "_shasum": "1104cd34f9b5b45d3eba88f1babc1924e1ce35fb", "_shrinkwrap": null, "_spec": "code-point-at@^1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width", + "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -66,12 +58,12 @@ }, "description": "ES2015 String#codePointAt() ponyfill", "devDependencies": { - "ava": "0.0.4" + "ava": "*" }, "directories": {}, "dist": { - "shasum": "f69b192d3f7d91e382e4b71bddb77878619ab0c6", - "tarball": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.0.0.tgz" + "shasum": "1104cd34f9b5b45d3eba88f1babc1924e1ce35fb", + "tarball": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.0.1.tgz" }, "engines": { "node": ">=0.10.0" @@ -79,11 +71,10 @@ "files": [ "index.js" ], - "gitHead": "c2ffa4064718b37c84c73a633abeeed5b486a469", - "homepage": "https://github.com/sindresorhus/code-point-at", + "gitHead": 
"502d72c5a959275e5d90f9c6641589756af44085", + "homepage": "https://github.com/sindresorhus/code-point-at#readme", "keywords": [ "es2015", - "es6", "ponyfill", "polyfill", "shim", @@ -110,7 +101,7 @@ "url": "git+https://github.com/sindresorhus/code-point-at.git" }, "scripts": { - "test": "node test.js" + "test": "ava" }, - "version": "1.0.0" + "version": "1.0.1" } diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md index 71e7d0931b8b0c..ef9713f1b010c2 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md @@ -1,8 +1,6 @@ # code-point-at [![Build Status](https://travis-ci.org/sindresorhus/code-point-at.svg?branch=master)](https://travis-ci.org/sindresorhus/code-point-at) -> ES2015 [`String#codePointAt()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native method +> ES2015 [`String#codePointAt()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt) [ponyfill](https://ponyfill.com) ## Install diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json index 3b853d672bca99..cf107ae74bd679 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json @@ -10,7 +10,7 @@ "spec": ">=1.0.0 <2.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" + "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" ], [ { @@ -22,19 +22,23 @@ "spec": ">=1.0.0 <2.0.0", "type": "range" }, - "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point" + "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point" ] ], "_from": "number-is-nan@^1.0.0", - "_id": "number-is-nan@1.0.0", + "_id": "number-is-nan@1.0.1", "_inCache": true, "_location": "/npm-registry-client/npmlog/gauge/string-width/is-fullwidth-code-point/number-is-nan", - "_nodeVersion": "0.12.3", + "_nodeVersion": "4.5.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": 
"tmp/number-is-nan-1.0.1.tgz_1475212313367_0.9480371843092144" + }, "_npmUser": { "name": "sindresorhus", "email": "sindresorhus@gmail.com" }, - "_npmVersion": "2.10.0", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { "raw": "number-is-nan@^1.0.0", @@ -48,11 +52,11 @@ "_requiredBy": [ "/npm-registry-client/npmlog/gauge/string-width/is-fullwidth-code-point" ], - "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz", - "_shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", + "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "_shasum": "097b602b53422a522c1afb8790318336941a011d", "_shrinkwrap": null, "_spec": "number-is-nan@^1.0.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point", + "_where": "/Users/rebecca/code/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", @@ -62,14 +66,14 @@ "url": "https://github.com/sindresorhus/number-is-nan/issues" }, "dependencies": {}, - "description": "ES6 Number.isNaN() ponyfill", + "description": "ES2015 Number.isNaN() ponyfill", "devDependencies": { - "ava": "0.0.4" + "ava": "*" }, "directories": {}, "dist": { - "shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", - "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz" + "shasum": "097b602b53422a522c1afb8790318336941a011d", + "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" }, "engines": { "node": ">=0.10.0" @@ -77,13 +81,11 @@ "files": [ "index.js" ], - "gitHead": "0f394b1bc33185c40304363b209e3f0588dbeeb3", + "gitHead": "ed9cdac3f428cc929b61bb230da42c87477af4b9", "homepage": "https://github.com/sindresorhus/number-is-nan#readme", "keywords": [ - "es6", "es2015", "ecmascript", - "harmony", "ponyfill", "polyfill", "shim", @@ -107,7 +109,7 @@ "url": "git+https://github.com/sindresorhus/number-is-nan.git" }, "scripts": { - "test": "node test.js" + "test": "ava" }, - "version": "1.0.0" + "version": "1.0.1" } diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md index 93d851a14f1ac5..24635087120128 100644 --- a/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md +++ b/deps/npm/node_modules/npm-registry-client/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md @@ -1,8 +1,6 @@ # number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) -> ES6 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native method +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) 
[ponyfill](https://ponyfill.com) ## Install diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/.npmignore b/deps/npm/node_modules/npm-registry-client/node_modules/retry/.npmignore deleted file mode 100644 index e7726a071b7f39..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/.npmignore +++ /dev/null @@ -1,2 +0,0 @@ -/node_modules/* -npm-debug.log diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/Makefile b/deps/npm/node_modules/npm-registry-client/node_modules/retry/Makefile deleted file mode 100644 index eee21a99dfc9ec..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -SHELL := /bin/bash - -test: - @node test/runner.js - -release-major: test - npm version major -m "Release %s" - git push - npm publish - -release-minor: test - npm version minor -m "Release %s" - git push - npm publish - -release-patch: test - npm version patch -m "Release %s" - git push - npm publish - -.PHONY: test - diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/README.md b/deps/npm/node_modules/npm-registry-client/node_modules/retry/README.md deleted file mode 100644 index eee05f7bb61537..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/README.md +++ /dev/null @@ -1,215 +0,0 @@ -# retry - -Abstraction for exponential and custom retry strategies for failed operations. - -## Installation - - npm install retry - -## Current Status - -This module has been tested and is ready to be used. - -## Tutorial - -The example below will retry a potentially failing `dns.resolve` operation -`10` times using an exponential backoff strategy. With the default settings, this -means the last attempt is made after `17 minutes and 3 seconds`. - -``` javascript -var dns = require('dns'); -var retry = require('retry'); - -function faultTolerantResolve(address, cb) { - var operation = retry.operation(); - - operation.attempt(function(currentAttempt) { - dns.resolve(address, function(err, addresses) { - if (operation.retry(err)) { - return; - } - - cb(err ? operation.mainError() : null, addresses); - }); - }); -} - -faultTolerantResolve('nodejs.org', function(err, addresses) { - console.log(err, addresses); -}); -``` - -Of course you can also configure the factors that go into the exponential -backoff. See the API documentation below for all available settings. -currentAttempt is an int representing the number of attempts so far. - -``` javascript -var operation = retry.operation({ - retries: 5, - factor: 3, - minTimeout: 1 * 1000, - maxTimeout: 60 * 1000, - randomize: true, -}); -``` - -## API - -### retry.operation([options]) - -Creates a new `RetryOperation` object. `options` is the same as `retry.timeouts()`'s `options`, with two additions: - -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. - -### retry.timeouts([options]) - -Returns an array of timeouts. All time `options` and return values are in -milliseconds. If `options` is an array, a copy of that array is returned. - -`options` is a JS object that can contain any of the following keys: - -* `retries`: The maximum amount of times to retry the operation. Default is `10`. -* `factor`: The exponential factor to use. Default is `2`. -* `minTimeout`: The number of milliseconds before starting the first retry. Default is `1000`. 
-* `maxTimeout`: The maximum number of milliseconds between two retries. Default is `Infinity`. -* `randomize`: Randomizes the timeouts by multiplying with a factor between `1` to `2`. Default is `false`. - -The formula used to calculate the individual timeouts is: - -``` -Math.min(random * minTimeout * Math.pow(factor, attempt), maxTimeout) -``` - -Have a look at [this article][article] for a better explanation of approach. - -If you want to tune your `factor` / `times` settings to attempt the last retry -after a certain amount of time, you can use wolfram alpha. For example in order -to tune for `10` attempts in `5 minutes`, you can use this equation: - -![screenshot](https://github.com/tim-kos/node-retry/raw/master/equation.gif) - -Explaining the various values from left to right: - -* `k = 0 ... 9`: The `retries` value (10) -* `1000`: The `minTimeout` value in ms (1000) -* `x^k`: No need to change this, `x` will be your resulting factor -* `5 * 60 * 1000`: The desired total amount of time for retrying in ms (5 minutes) - -To make this a little easier for you, use wolfram alpha to do the calculations: - - - -[article]: http://dthain.blogspot.com/2009/02/exponential-backoff-in-distributed.html - -### retry.createTimeout(attempt, opts) - -Returns a new `timeout` (integer in milliseconds) based on the given parameters. - -`attempt` is an integer representing for which retry the timeout should be calculated. If your retry operation was executed 4 times you had one attempt and 3 retries. If you then want to calculate a new timeout, you should set `attempt` to 4 (attempts are zero-indexed). - -`opts` can include `factor`, `minTimeout`, `randomize` (boolean) and `maxTimeout`. They are documented above. - -`retry.createTimeout()` is used internally by `retry.timeouts()` and is public for you to be able to create your own timeouts for reinserting an item, see [issue #13](https://github.com/tim-kos/node-retry/issues/13). - -### retry.wrap(obj, [options], [methodNames]) - -Wrap all functions of the `obj` with retry. Optionally you can pass operation options and -an array of method names which need to be wrapped. - -``` -retry.wrap(obj) - -retry.wrap(obj, ['method1', 'method2']) - -retry.wrap(obj, {retries: 3}) - -retry.wrap(obj, {retries: 3}, ['method1', 'method2']) -``` -The `options` object can take any options that the usual call to `retry.operation` can take. - -### new RetryOperation(timeouts, [options]) - -Creates a new `RetryOperation` where `timeouts` is an array where each value is -a timeout given in milliseconds. - -Available options: -* `forever`: Whether to retry forever, defaults to `false`. -* `unref`: Wether to [unref](https://nodejs.org/api/timers.html#timers_unref) the setTimeout's, defaults to `false`. - -If `forever` is true, the following changes happen: -* `RetryOperation.errors()` will only output an array of one item: the last error. -* `RetryOperation` will repeatedly use the `timeouts` array. Once all of its timeouts have been used up, it restarts with the first timeout, then uses the second and so on. - -#### retryOperation.errors() - -Returns an array of all errors that have been passed to -`retryOperation.retry()` so far. - -#### retryOperation.mainError() - -A reference to the error object that occured most frequently. Errors are -compared using the `error.message` property. - -If multiple error messages occured the same amount of time, the last error -object with that message is returned. - -If no errors occured so far, the value is `null`. 
- -#### retryOperation.attempt(fn, timeoutOps) - -Defines the function `fn` that is to be retried and executes it for the first -time right away. The `fn` function can receive an optional `currentAttempt` callback that represents the number of attempts to execute `fn` so far. - -Optionally defines `timeoutOps` which is an object having a property `timeout` in miliseconds and a property `cb` callback function. -Whenever your retry operation takes longer than `timeout` to execute, the timeout callback function `cb` is called. - - -#### retryOperation.try(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.start(fn) - -This is an alias for `retryOperation.attempt(fn)`. This is deprecated. Please use `retryOperation.attempt(fn)` instead. - -#### retryOperation.retry(error) - -Returns `false` when no `error` value is given, or the maximum amount of retries -has been reached. - -Otherwise it returns `true`, and retries the operation after the timeout for -the current attempt number. - -#### retryOperation.stop() - -Allows you to stop the operation being retried. Useful for aborting the operation on a fatal error etc. - -#### retryOperation.attempts() - -Returns an int representing the number of attempts it took to call `fn` before it was successful. - -## License - -retry is licensed under the MIT license. - - -# Changelog - -0.10.0 Adding `stop` functionality, thanks to @maxnachlinger. - -0.9.0 Adding `unref` functionality, thanks to @satazor. - -0.8.0 Implementing retry.wrap. - -0.7.0 Some bug fixes and made retry.createTimeout() public. Fixed issues [#10](https://github.com/tim-kos/node-retry/issues/10), [#12](https://github.com/tim-kos/node-retry/issues/12), and [#13](https://github.com/tim-kos/node-retry/issues/13). - -0.6.0 Introduced optional timeOps parameter for the attempt() function which is an object having a property timeout in milliseconds and a property cb callback function. Whenever your retry operation takes longer than timeout to execute, the timeout callback function cb is called. - -0.5.0 Some minor refactoring. - -0.4.0 Changed retryOperation.try() to retryOperation.attempt(). Deprecated the aliases start() and try() for it. - -0.3.0 Added retryOperation.start() which is an alias for retryOperation.try(). - -0.2.0 Added attempts() function and parameter to retryOperation.try() representing the number of attempts it took to call fn(). diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/equation.gif b/deps/npm/node_modules/npm-registry-client/node_modules/retry/equation.gif deleted file mode 100644 index 97107237ba19f51997d8d76135bc7d1486d856f0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1209 zcmV;q1V;NuNk%w1VXpu&0M!5h000001ONyK2nY-a5D*X$6c88~7#JKFARr(hBp@j$ zDJd)|F)%SPG%-0iIXOHzK|n!4L_tbON=i&hQczM-RZ?16T3TINVqs!pWnyY+YHDq2 zb8&NXb#r@pdwYF*gMovCg@cQUi;Inml#!H_m6V*BoSdDUq@kpwrKGH?tgNoAw6e6c zwzR#vy}iD@#lpqK#>LIb&CSlu)za0~*45tH-rnBc=Hlk&=H~9|?(XjH_VV`j_V)k! 
z|NsC0EC2ui0IvWs000L6z@KnPEENVOfMTEH9c0Z7p9z3<`87kr4n!IH|Ew$buF^Tr6-3^@midQKv4UFk?fCD@~8E z@HgbfvLPrU7IV4gfp|8%C^H$l;qq zLJ;`y;|7BS2YlpEz->xcBQ#7@yHNtNkOmwQ1ek!X@sGzuLXR#jx2fyLw;309jQGe6 zL`?+$umPZ&50}J^BQGxGIN%{G2=u5hqw|pm*t2Ul0ssMk0vb%GI^lz~c)})l{~Qc?h2kCMJmBf=4KTfq+A}mV<6G&6wD3KiFu51s1j8f&fS0 zFaiqI41q&$@ZBIIl0*neBoe|cd1H+<3Zdf>DJ(#i62j@_f)Fj-_2my?IyGjQMd%>G z07WXH-J3lkxMd6n7?DE>JIL@P5d*{^#0>(>vA~&p4RL3ldlu2^8P z!OlGQ%z<|`+iWomtGr?~EJ7!(^wLZ>?ex=7N4-QZ)=BNMGD+xg!3P&;Y_%-ZByj;I zEWG$NFy8zC&JhLd@WT!ToDGaV{P^?c4^0Iv_b4i{ghbnK$GtZyTzMtL-DCey_TZ>w XwprD$S>S;MUNdg_<(OxVL=XTw-hl|W diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/dns.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/dns.js deleted file mode 100644 index 446729b6f9af6b..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/dns.js +++ /dev/null @@ -1,31 +0,0 @@ -var dns = require('dns'); -var retry = require('../lib/retry'); - -function faultTolerantResolve(address, cb) { - var opts = { - retries: 2, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: 2 * 1000, - randomize: true - }; - var operation = retry.operation(opts); - - operation.attempt(function(currentAttempt) { - dns.resolve(address, function(err, addresses) { - if (operation.retry(err)) { - return; - } - - cb(operation.mainError(), operation.errors(), addresses); - }); - }); -} - -faultTolerantResolve('nodejs.org', function(err, errors, addresses) { - console.warn('err:'); - console.log(err); - - console.warn('addresses:'); - console.log(addresses); -}); \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/stop.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/stop.js deleted file mode 100644 index e1ceafeebafc51..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/example/stop.js +++ /dev/null @@ -1,40 +0,0 @@ -var retry = require('../lib/retry'); - -function attemptAsyncOperation(someInput, cb) { - var opts = { - retries: 2, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: 2 * 1000, - randomize: true - }; - var operation = retry.operation(opts); - - operation.attempt(function(currentAttempt) { - failingAsyncOperation(someInput, function(err, result) { - - if (err && err.message === 'A fatal error') { - operation.stop(); - return cb(err); - } - - if (operation.retry(err)) { - return; - } - - cb(operation.mainError(), operation.errors(), result); - }); - }); -} - -attemptAsyncOperation('test input', function(err, errors, result) { - console.warn('err:'); - console.log(err); - - console.warn('result:'); - console.log(result); -}); - -function failingAsyncOperation(input, cb) { - return setImmediate(cb.bind(null, new Error('A fatal error'))); -} diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/index.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/index.js deleted file mode 100644 index ee62f3a112c28b..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/index.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require('./lib/retry'); \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry.js deleted file mode 100644 index 77428cfd0006fa..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry.js +++ /dev/null @@ -1,99 +0,0 @@ -var 
RetryOperation = require('./retry_operation'); - -exports.operation = function(options) { - var timeouts = exports.timeouts(options); - return new RetryOperation(timeouts, { - forever: options && options.forever, - unref: options && options.unref - }); -}; - -exports.timeouts = function(options) { - if (options instanceof Array) { - return [].concat(options); - } - - var opts = { - retries: 10, - factor: 2, - minTimeout: 1 * 1000, - maxTimeout: Infinity, - randomize: false - }; - for (var key in options) { - opts[key] = options[key]; - } - - if (opts.minTimeout > opts.maxTimeout) { - throw new Error('minTimeout is greater than maxTimeout'); - } - - var timeouts = []; - for (var i = 0; i < opts.retries; i++) { - timeouts.push(this.createTimeout(i, opts)); - } - - if (options && options.forever && !timeouts.length) { - timeouts.push(this.createTimeout(i, opts)); - } - - // sort the array numerically ascending - timeouts.sort(function(a,b) { - return a - b; - }); - - return timeouts; -}; - -exports.createTimeout = function(attempt, opts) { - var random = (opts.randomize) - ? (Math.random() + 1) - : 1; - - var timeout = Math.round(random * opts.minTimeout * Math.pow(opts.factor, attempt)); - timeout = Math.min(timeout, opts.maxTimeout); - - return timeout; -}; - -exports.wrap = function(obj, options, methods) { - if (options instanceof Array) { - methods = options; - options = null; - } - - if (!methods) { - methods = []; - for (var key in obj) { - if (typeof obj[key] === 'function') { - methods.push(key); - } - } - } - - for (var i = 0; i < methods.length; i++) { - var method = methods[i]; - var original = obj[method]; - - obj[method] = function retryWrapper() { - var op = exports.operation(options); - var args = Array.prototype.slice.call(arguments); - var callback = args.pop(); - - args.push(function(err) { - if (op.retry(err)) { - return; - } - if (err) { - arguments[0] = op.mainError(); - } - callback.apply(this, arguments); - }); - - op.attempt(function() { - original.apply(obj, args); - }); - }; - obj[method].options = options; - } -}; diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js deleted file mode 100644 index 2b3db8e1776973..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/lib/retry_operation.js +++ /dev/null @@ -1,143 +0,0 @@ -function RetryOperation(timeouts, options) { - // Compatibility for the old (timeouts, retryForever) signature - if (typeof options === 'boolean') { - options = { forever: options }; - } - - this._timeouts = timeouts; - this._options = options || {}; - this._fn = null; - this._errors = []; - this._attempts = 1; - this._operationTimeout = null; - this._operationTimeoutCb = null; - this._timeout = null; - - if (this._options.forever) { - this._cachedTimeouts = this._timeouts.slice(0); - } -} -module.exports = RetryOperation; - -RetryOperation.prototype.stop = function() { - if (this._timeout) { - clearTimeout(this._timeout); - } - - this._timeouts = []; - this._cachedTimeouts = null; -}; - -RetryOperation.prototype.retry = function(err) { - if (this._timeout) { - clearTimeout(this._timeout); - } - - if (!err) { - return false; - } - - this._errors.push(err); - - var timeout = this._timeouts.shift(); - if (timeout === undefined) { - if (this._cachedTimeouts) { - // retry forever, only keep last error - this._errors.splice(this._errors.length - 1, this._errors.length); - this._timeouts = 
this._cachedTimeouts.slice(0); - timeout = this._timeouts.shift(); - } else { - return false; - } - } - - var self = this; - var timer = setTimeout(function() { - self._attempts++; - - if (self._operationTimeoutCb) { - self._timeout = setTimeout(function() { - self._operationTimeoutCb(self._attempts); - }, self._operationTimeout); - - if (this._options.unref) { - self._timeout.unref(); - } - } - - self._fn(self._attempts); - }, timeout); - - if (this._options.unref) { - timer.unref(); - } - - return true; -}; - -RetryOperation.prototype.attempt = function(fn, timeoutOps) { - this._fn = fn; - - if (timeoutOps) { - if (timeoutOps.timeout) { - this._operationTimeout = timeoutOps.timeout; - } - if (timeoutOps.cb) { - this._operationTimeoutCb = timeoutOps.cb; - } - } - - var self = this; - if (this._operationTimeoutCb) { - this._timeout = setTimeout(function() { - self._operationTimeoutCb(); - }, self._operationTimeout); - } - - this._fn(this._attempts); -}; - -RetryOperation.prototype.try = function(fn) { - console.log('Using RetryOperation.try() is deprecated'); - this.attempt(fn); -}; - -RetryOperation.prototype.start = function(fn) { - console.log('Using RetryOperation.start() is deprecated'); - this.attempt(fn); -}; - -RetryOperation.prototype.start = RetryOperation.prototype.try; - -RetryOperation.prototype.errors = function() { - return this._errors; -}; - -RetryOperation.prototype.attempts = function() { - return this._attempts; -}; - -RetryOperation.prototype.mainError = function() { - if (this._errors.length === 0) { - return null; - } - - var counts = {}; - var mainError = null; - var mainErrorCount = 0; - - for (var i = 0; i < this._errors.length; i++) { - var error = this._errors[i]; - var message = error.message; - var count = (counts[message] || 0) + 1; - - counts[message] = count; - - if (count >= mainErrorCount) { - mainError = error; - mainErrorCount = count; - } - } - - return mainError; -}; diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/package.json b/deps/npm/node_modules/npm-registry-client/node_modules/retry/package.json deleted file mode 100644 index 0ba214a6fd0a14..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/package.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "retry@^0.10.0", - "scope": null, - "escapedName": "retry", - "name": "retry", - "rawSpec": "^0.10.0", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client" - ] - ], - "_from": "retry@>=0.10.0 <0.11.0", - "_id": "retry@0.10.0", - "_inCache": true, - "_location": "/npm-registry-client/retry", - "_nodeVersion": "4.2.1", - "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/retry-0.10.0.tgz_1471682099847_0.5031970851123333" - }, - "_npmUser": { - "name": "tim-kos", - "email": "tim@debuggable.com" - }, - "_npmVersion": "2.1.7", - "_phantomChildren": {}, - "_requested": { - "raw": "retry@^0.10.0", - "scope": null, - "escapedName": "retry", - "name": "retry", - "rawSpec": "^0.10.0", - "spec": ">=0.10.0 <0.11.0", - "type": "range" - }, - "_requiredBy": [ - "/npm-registry-client" - ], - "_resolved": "https://registry.npmjs.org/retry/-/retry-0.10.0.tgz", - "_shasum": "649e15ca408422d98318161935e7f7d652d435dd", - "_shrinkwrap": null, - "_spec": "retry@^0.10.0", - "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-registry-client", - "author": { - "name": "Tim Koschützki", - "email": "tim@debuggable.com", - "url": 
"http://debuggable.com/" - }, - "bugs": { - "url": "https://github.com/tim-kos/node-retry/issues" - }, - "dependencies": {}, - "description": "Abstraction for exponential and custom retry strategies for failed operations.", - "devDependencies": { - "fake": "0.2.0", - "far": "0.0.1" - }, - "directories": { - "lib": "./lib" - }, - "dist": { - "shasum": "649e15ca408422d98318161935e7f7d652d435dd", - "tarball": "https://registry.npmjs.org/retry/-/retry-0.10.0.tgz" - }, - "engines": { - "node": "*" - }, - "gitHead": "0616e6a6ebc49b5a36b619c8f7c414ced8c3813b", - "homepage": "https://github.com/tim-kos/node-retry", - "license": "MIT", - "main": "index", - "maintainers": [ - { - "name": "tim-kos", - "email": "tim@debuggable.com" - } - ], - "name": "retry", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/tim-kos/node-retry.git" - }, - "scripts": {}, - "version": "0.10.0" -} diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/common.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/common.js deleted file mode 100644 index 224720696ebac8..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/common.js +++ /dev/null @@ -1,10 +0,0 @@ -var common = module.exports; -var path = require('path'); - -var rootDir = path.join(__dirname, '..'); -common.dir = { - lib: rootDir + '/lib' -}; - -common.assert = require('assert'); -common.fake = require('fake'); \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js deleted file mode 100644 index b41307cb529f12..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-forever.js +++ /dev/null @@ -1,24 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var retry = require(common.dir.lib + '/retry'); - -(function testForeverUsesFirstTimeout() { - var operation = retry.operation({ - retries: 0, - minTimeout: 100, - maxTimeout: 100, - forever: true - }); - - operation.attempt(function(numAttempt) { - console.log('>numAttempt', numAttempt); - var err = new Error("foo"); - if (numAttempt == 10) { - operation.stop(); - } - - if (operation.retry(err)) { - return; - } - }); -})(); diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js deleted file mode 100644 index 916936424f073b..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-operation.js +++ /dev/null @@ -1,176 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var fake = common.fake.create(); -var retry = require(common.dir.lib + '/retry'); - -(function testErrors() { - var operation = retry.operation(); - - var error = new Error('some error'); - var error2 = new Error('some other error'); - operation._errors.push(error); - operation._errors.push(error2); - - assert.deepEqual(operation.errors(), [error, error2]); -})(); - -(function testMainErrorReturnsMostFrequentError() { - var operation = retry.operation(); - var error = new Error('some error'); - var error2 = new Error('some other error'); - - operation._errors.push(error); - operation._errors.push(error2); - 
operation._errors.push(error); - - assert.strictEqual(operation.mainError(), error); -})(); - -(function testMainErrorReturnsLastErrorOnEqualCount() { - var operation = retry.operation(); - var error = new Error('some error'); - var error2 = new Error('some other error'); - - operation._errors.push(error); - operation._errors.push(error2); - - assert.strictEqual(operation.mainError(), error2); -})(); - -(function testAttempt() { - var operation = retry.operation(); - var fn = new Function(); - - var timeoutOpts = { - timeout: 1, - cb: function() {} - }; - operation.attempt(fn, timeoutOpts); - - assert.strictEqual(fn, operation._fn); - assert.strictEqual(timeoutOpts.timeout, operation._operationTimeout); - assert.strictEqual(timeoutOpts.cb, operation._operationTimeoutCb); -})(); - -(function testRetry() { - var times = 3; - var error = new Error('some error'); - var operation = retry.operation([1, 2, 3]); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (operation.retry(error)) { - return; - } - - assert.strictEqual(attempts, 4); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testRetryForever() { - var error = new Error('some error'); - var operation = retry.operation({ retries: 3, forever: true }); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (attempts !== 6 && operation.retry(error)) { - return; - } - - assert.strictEqual(attempts, 6); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testRetryForeverNoRetries() { - var error = new Error('some error'); - var delay = 50 - var operation = retry.operation({ - retries: null, - forever: true, - minTimeout: delay, - maxTimeout: delay - }); - - var attempts = 0; - var startTime = new Date().getTime(); - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - if (attempts !== 4 && operation.retry(error)) { - return; - } - - var endTime = new Date().getTime(); - var minTime = startTime + (delay * 3); - var maxTime = minTime + 20 // add a little headroom for code execution time - assert(endTime > minTime) - assert(endTime < maxTime) - assert.strictEqual(attempts, 4); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - finalCallback(); - }); - }; - - fn(); -})(); - -(function testStop() { - var error = new Error('some error'); - var operation = retry.operation([1, 2, 3]); - var attempts = 0; - - var finalCallback = fake.callback('finalCallback'); - fake.expectAnytime(finalCallback); - - var fn = function() { - operation.attempt(function(currentAttempt) { - attempts++; - assert.equal(currentAttempt, attempts); - - if (attempts === 2) { - operation.stop(); - - assert.strictEqual(attempts, 2); - assert.strictEqual(operation.attempts(), attempts); - assert.strictEqual(operation.mainError(), error); - 
finalCallback(); - } - - if (operation.retry(error)) { - return; - } - }); - }; - - fn(); -})(); diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js deleted file mode 100644 index 7ca8bc7eb596b5..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-retry-wrap.js +++ /dev/null @@ -1,77 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var fake = common.fake.create(); -var retry = require(common.dir.lib + '/retry'); - -function getLib() { - return { - fn1: function() {}, - fn2: function() {}, - fn3: function() {} - }; -} - -(function wrapAll() { - var lib = getLib(); - retry.wrap(lib); - assert.equal(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); -}()); - -(function wrapAllPassOptions() { - var lib = getLib(); - retry.wrap(lib, {retries: 2}); - assert.equal(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); - assert.equal(lib.fn1.options.retries, 2); - assert.equal(lib.fn2.options.retries, 2); - assert.equal(lib.fn3.options.retries, 2); -}()); - -(function wrapDefined() { - var lib = getLib(); - retry.wrap(lib, ['fn2', 'fn3']); - assert.notEqual(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); -}()); - -(function wrapDefinedAndPassOptions() { - var lib = getLib(); - retry.wrap(lib, {retries: 2}, ['fn2', 'fn3']); - assert.notEqual(lib.fn1.name, 'retryWrapper'); - assert.equal(lib.fn2.name, 'retryWrapper'); - assert.equal(lib.fn3.name, 'retryWrapper'); - assert.equal(lib.fn2.options.retries, 2); - assert.equal(lib.fn3.options.retries, 2); -}()); - -(function runWrappedWithoutError() { - var callbackCalled; - var lib = {method: function(a, b, callback) { - assert.equal(a, 1); - assert.equal(b, 2); - assert.equal(typeof callback, 'function'); - callback(); - }}; - retry.wrap(lib); - lib.method(1, 2, function() { - callbackCalled = true; - }); - assert.ok(callbackCalled); -}()); - -(function runWrappedWithError() { - var callbackCalled; - var lib = {method: function(callback) { - callback(new Error('Some error')); - }}; - retry.wrap(lib, {retries: 1}); - lib.method(function(err) { - callbackCalled = true; - assert.ok(err instanceof Error); - }); - assert.ok(!callbackCalled); -}()); diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js deleted file mode 100644 index 7206b0fb0b01d0..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/integration/test-timeouts.js +++ /dev/null @@ -1,69 +0,0 @@ -var common = require('../common'); -var assert = common.assert; -var retry = require(common.dir.lib + '/retry'); - -(function testDefaultValues() { - var timeouts = retry.timeouts(); - - assert.equal(timeouts.length, 10); - assert.equal(timeouts[0], 1000); - assert.equal(timeouts[1], 2000); - assert.equal(timeouts[2], 4000); -})(); - -(function testDefaultValuesWithRandomize() { - var minTimeout = 5000; - var timeouts = retry.timeouts({ - minTimeout: minTimeout, - randomize: true - }); - - assert.equal(timeouts.length, 10); - assert.ok(timeouts[0] > minTimeout); - 
assert.ok(timeouts[1] > timeouts[0]); - assert.ok(timeouts[2] > timeouts[1]); -})(); - -(function testPassedTimeoutsAreUsed() { - var timeoutsArray = [1000, 2000, 3000]; - var timeouts = retry.timeouts(timeoutsArray); - assert.deepEqual(timeouts, timeoutsArray); - assert.notStrictEqual(timeouts, timeoutsArray); -})(); - -(function testTimeoutsAreWithinBoundaries() { - var minTimeout = 1000; - var maxTimeout = 10000; - var timeouts = retry.timeouts({ - minTimeout: minTimeout, - maxTimeout: maxTimeout - }); - for (var i = 0; i < timeouts; i++) { - assert.ok(timeouts[i] >= minTimeout); - assert.ok(timeouts[i] <= maxTimeout); - } -})(); - -(function testTimeoutsAreIncremental() { - var timeouts = retry.timeouts(); - var lastTimeout = timeouts[0]; - for (var i = 0; i < timeouts; i++) { - assert.ok(timeouts[i] > lastTimeout); - lastTimeout = timeouts[i]; - } -})(); - -(function testTimeoutsAreIncrementalForFactorsLessThanOne() { - var timeouts = retry.timeouts({ - retries: 3, - factor: 0.5 - }); - - var expected = [250, 500, 1000]; - assert.deepEqual(expected, timeouts); -})(); - -(function testRetries() { - var timeouts = retry.timeouts({retries: 2}); - assert.strictEqual(timeouts.length, 2); -})(); diff --git a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/runner.js b/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/runner.js deleted file mode 100644 index e0ee2f570fe3c0..00000000000000 --- a/deps/npm/node_modules/npm-registry-client/node_modules/retry/test/runner.js +++ /dev/null @@ -1,5 +0,0 @@ -var far = require('far').create(); - -far.add(__dirname); -far.include(/\/test-.*\.js$/); -far.execute(); diff --git a/deps/npm/node_modules/npm-registry-client/package.json b/deps/npm/node_modules/npm-registry-client/package.json index 51ff6bebb24ddd..f463851a8978b5 100644 --- a/deps/npm/node_modules/npm-registry-client/package.json +++ b/deps/npm/node_modules/npm-registry-client/package.json @@ -2,52 +2,56 @@ "_args": [ [ { - "raw": "npm-registry-client@7.2.1", + "raw": "npm-registry-client@7.3.0", "scope": null, "escapedName": "npm-registry-client", "name": "npm-registry-client", - "rawSpec": "7.2.1", - "spec": "7.2.1", + "rawSpec": "7.3.0", + "spec": "7.3.0", "type": "version" }, - "/Users/zkat/Documents/code/npm" + "/Users/rebecca/code/npm" ] ], - "_from": "npm-registry-client@7.2.1", - "_id": "npm-registry-client@7.2.1", + "_from": "npm-registry-client@7.3.0", + "_id": "npm-registry-client@7.3.0", "_inCache": true, "_location": "/npm-registry-client", - "_nodeVersion": "6.3.1", + "_nodeVersion": "6.7.0", "_npmOperationalInternal": { - "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/npm-registry-client-7.2.1.tgz_1472871043942_0.2117650501895696" + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/npm-registry-client-7.3.0.tgz_1476956977374_0.8605887587182224" }, "_npmUser": { - "name": "othiym23", - "email": "ogd@aoaioxxysz.net" + "name": "iarna", + "email": "me@re-becca.org" }, - "_npmVersion": "3.10.7", + "_npmVersion": "3.10.9", "_phantomChildren": { - "inherits": "2.0.3" + "aproba": "1.0.4", + "has-unicode": "2.0.1", + "inherits": "2.0.3", + "readable-stream": "2.1.5", + "strip-ansi": "3.0.1" }, "_requested": { - "raw": "npm-registry-client@7.2.1", + "raw": "npm-registry-client@7.3.0", "scope": null, "escapedName": "npm-registry-client", "name": "npm-registry-client", - "rawSpec": "7.2.1", - "spec": "7.2.1", + "rawSpec": "7.3.0", + "spec": "7.3.0", "type": "version" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": 
"https://registry.npmjs.org/npm-registry-client/-/npm-registry-client-7.2.1.tgz", - "_shasum": "c792266b088cc313f8525e7e35248626c723db75", + "_resolved": "https://registry.npmjs.org/npm-registry-client/-/npm-registry-client-7.3.0.tgz", + "_shasum": "f2a390e8b13b78fafe26e9fa9d8bc74e17bcaa50", "_shrinkwrap": null, - "_spec": "npm-registry-client@7.2.1", - "_where": "/Users/zkat/Documents/code/npm", + "_spec": "npm-registry-client@7.3.0", + "_where": "/Users/rebecca/code/npm", "author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -79,10 +83,10 @@ }, "directories": {}, "dist": { - "shasum": "c792266b088cc313f8525e7e35248626c723db75", - "tarball": "https://registry.npmjs.org/npm-registry-client/-/npm-registry-client-7.2.1.tgz" + "shasum": "f2a390e8b13b78fafe26e9fa9d8bc74e17bcaa50", + "tarball": "https://registry.npmjs.org/npm-registry-client/-/npm-registry-client-7.3.0.tgz" }, - "gitHead": "debec76884db8092c2c7a21ab5b4ed083f8ce2c9", + "gitHead": "0e0a707dcf62e9705ef17b7d471d3657c33e79e6", "homepage": "https://github.com/npm/npm-registry-client#readme", "license": "ISC", "main": "index.js", @@ -115,5 +119,5 @@ "scripts": { "test": "standard && tap test/*.js" }, - "version": "7.2.1" + "version": "7.3.0" } diff --git a/deps/npm/node_modules/npm-registry-client/test/config-override.js b/deps/npm/node_modules/npm-registry-client/test/config-override.js index 026cb199cbe93c..ab44aa02b72503 100644 --- a/deps/npm/node_modules/npm-registry-client/test/config-override.js +++ b/deps/npm/node_modules/npm-registry-client/test/config-override.js @@ -24,7 +24,9 @@ var config = { log: { fake: function () {} }, defaultTag: 'next', couchToken: { object: true }, - sessionToken: 'hamchunx' + sessionToken: 'hamchunx', + isFromCI: true, + scope: '@test' } test('config defaults', function (t) { @@ -52,6 +54,8 @@ test('config defaults', function (t) { t.equal(client.config.defaultTag, 'next') t.ok(client.config.couchToken.object) t.equal(client.config.sessionToken, 'hamchunx') + t.ok(client.config.isFromCI) + t.is(client.config.scope, '@test') t.end() }) diff --git a/deps/npm/node_modules/npmlog/CHANGELOG.md b/deps/npm/node_modules/npmlog/CHANGELOG.md index f549a1f3de81e3..a4cb68728b2bfe 100644 --- a/deps/npm/node_modules/npmlog/CHANGELOG.md +++ b/deps/npm/node_modules/npmlog/CHANGELOG.md @@ -1,3 +1,12 @@ +### v4.0.1 + +* Fix bugs where `log.progressEnabled` got out of sync with how `gauge` kept + track of these things resulting in a progressbar that couldn't be disabled. + +### v4.0.0 + +* Allow creating log levels that are an empty string or 0. + ### v3.1.2 * Update to `gauge@1.6.0` adding support for default values for template diff --git a/deps/npm/node_modules/npmlog/log.js b/deps/npm/node_modules/npmlog/log.js index bf894fb7a63423..be67567ad64882 100644 --- a/deps/npm/node_modules/npmlog/log.js +++ b/deps/npm/node_modules/npmlog/log.js @@ -39,6 +39,7 @@ log.disableColor = function () { log.level = 'info' log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked theme: {hasColor: log.useColor()}, template: [ {type: 'progressbar', length: 20}, @@ -51,8 +52,9 @@ log.gauge = new Gauge(stream, { log.tracker = new Progress.TrackerGroup() -// no progress bars unless asked -log.progressEnabled = false +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. 
+log.progressEnabled = log.gauge.isEnabled() var unicodeEnabled @@ -77,15 +79,13 @@ log.setGaugeTemplate = function (template) { log.enableProgress = function () { if (this.progressEnabled) return this.progressEnabled = true - if (this._pause) return this.tracker.on('change', this.showProgress) + if (this._pause) return this.gauge.enable() - this.showProgress() } log.disableProgress = function () { if (!this.progressEnabled) return - this.clearProgress() this.progressEnabled = false this.tracker.removeListener('change', this.showProgress) this.gauge.disable() @@ -147,6 +147,7 @@ log.showProgress = function (name, completed) { // temporarily stop emitting, but don't drop log.pause = function () { this._paused = true + if (this.progressEnabled) this.gauge.disable() } log.resume = function () { @@ -158,7 +159,7 @@ log.resume = function () { b.forEach(function (m) { this.emitLog(m) }, this) - if (this.progressEnabled) this.enableProgress() + if (this.progressEnabled) this.gauge.enable() } log._buffer = [] diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/CHANGELOG.md b/deps/npm/node_modules/npmlog/node_modules/gauge/CHANGELOG.md index efd08fc7c19dd7..e64d969fed28a8 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/CHANGELOG.md +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/CHANGELOG.md @@ -1,3 +1,13 @@ +### v2.7.1 + +* Bug fix: Calls to show/pulse while the progress bar is disabled should still + update our internal representation of what would be shown should it be enabled. + +### v2.7.0 + +* New feature: Add new `isEnabled` method to allow introspection of the gauge's + "enabledness" as controlled by `.enable()` and `.disable()`. + ### v2.6.0 * Bug fix: Don't run the code associated with `enable`/`disable` if the gauge diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md index bf87d189f1b5c1..bdd60e38c20929 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/README.md +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/README.md @@ -145,6 +145,10 @@ Hides the gauge and ignores further calls to `show` or `pulse`. Shows the gauge and resumes updating when `show` or `pulse` is called. +#### `gauge.isEnabled()` + +Returns true if the gauge is enabled. + #### `gauge.setThemeset(themes)` Change the themeset to select a theme from. 
The same as the `themes` option diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/index.js index 7eefb9507bb47e..17b4ece06efb64 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/index.js +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/index.js @@ -74,6 +74,10 @@ function Gauge (arg1, arg2) { } Gauge.prototype = {} +Gauge.prototype.isEnabled = function () { + return !this._disabled +} + Gauge.prototype.setTemplate = function (template) { this._gauge.setTemplate(template) if (this._showing) this._requestRedraw() @@ -164,7 +168,6 @@ Gauge.prototype.hide = function (cb) { } Gauge.prototype.show = function (section, completed) { - if (this._disabled) return this._showing = true if (typeof section === 'string') { this._status.section = section @@ -176,14 +179,15 @@ Gauge.prototype.show = function (section, completed) { } } if (completed != null) this._status.completed = completed + if (this._disabled) return this._requestRedraw() } Gauge.prototype.pulse = function (subsection) { - if (this._disabled) return - if (!this._showing) return this._status.subsection = subsection || '' this._status.spun ++ + if (this._disabled) return + if (!this._showing) return this._requestRedraw() } diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md index c12f8340c8d6c6..e2f70d22503634 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +## [3.0.1](https://github.com/tapjs/signal-exit/compare/v3.0.0...v3.0.1) (2016-09-08) + + +### Bug Fixes + +* do not listen on SIGBUS, SIGFPE, SIGSEGV and SIGILL ([#40](https://github.com/tapjs/signal-exit/issues/40)) ([5b105fb](https://github.com/tapjs/signal-exit/commit/5b105fb)) + + + # [3.0.0](https://github.com/tapjs/signal-exit/compare/v2.1.2...v3.0.0) (2016-06-13) diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt index c7e27478a3eff8..eead04a12162dc 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/LICENSE.txt @@ -1,3 +1,5 @@ +The ISC License + Copyright (c) 2015, Contributors Permission to use, copy, modify, and/or distribute this software diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json index ce0807f2de3227..84b00e1d11611c 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/package.json @@ -2,51 +2,64 @@ "_args": [ [ { - "name": "signal-exit", "raw": "signal-exit@^3.0.0", + "scope": null, + "escapedName": "signal-exit", + "name": "signal-exit", "rawSpec": "^3.0.0", + "spec": ">=3.0.0 <4.0.0", + "type": "range" + }, + "/Users/rebecca/code/npm2/node_modules/node-gyp/node_modules/npmlog/node_modules/gauge" + ], + [ + { + "raw": "signal-exit@^3.0.0", "scope": null, + "escapedName": "signal-exit", + "name": "signal-exit", + "rawSpec": "^3.0.0", "spec": ">=3.0.0 <4.0.0", "type": "range" }, - "/Users/rebecca/code/npm/node_modules/npmlog/node_modules/gauge" + "/Users/rebecca/code/npm2/node_modules/npmlog/node_modules/gauge" ] ], - "_from": "signal-exit@>=3.0.0 <4.0.0", - "_id": "signal-exit@3.0.0", + "_from": "signal-exit@^3.0.0", + "_id": "signal-exit@3.0.1", "_inCache": true, - "_installable": true, "_location": "/npmlog/gauge/signal-exit", - "_nodeVersion": "5.1.0", + "_nodeVersion": "6.5.0", "_npmOperationalInternal": { "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/signal-exit-3.0.0.tgz_1465857346813_0.7961636525578797" + "tmp": "tmp/signal-exit-3.0.1.tgz_1473354783379_0.4592130535747856" }, "_npmUser": { - "email": "ben@npmjs.com", - "name": "bcoe" + "name": "bcoe", + "email": "ben@npmjs.com" }, - "_npmVersion": "3.3.12", + "_npmVersion": "3.10.3", "_phantomChildren": {}, "_requested": { - "name": "signal-exit", "raw": "signal-exit@^3.0.0", - "rawSpec": "^3.0.0", "scope": null, + "escapedName": "signal-exit", + "name": "signal-exit", + "rawSpec": "^3.0.0", "spec": ">=3.0.0 <4.0.0", "type": "range" }, "_requiredBy": [ "/npmlog/gauge" ], - "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz", - "_shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", + "_resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz", + "_shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", "_shrinkwrap": null, "_spec": "signal-exit@^3.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/npmlog/node_modules/gauge", + "_where": "/Users/rebecca/code/npm2/node_modules/npmlog/node_modules/gauge", "author": { - "email": "ben@npmjs.com", - "name": "Ben Coe" + "name": "Ben Coe", + "email": "ben@npmjs.com" }, "bugs": { "url": 
"https://github.com/tapjs/signal-exit/issues" @@ -55,22 +68,22 @@ "description": "when you want to fire an event no matter how a process exits.", "devDependencies": { "chai": "^3.5.0", - "coveralls": "^2.11.2", - "nyc": "^6.4.4", + "coveralls": "^2.11.10", + "nyc": "^8.1.0", "standard": "^7.1.2", "standard-version": "^2.3.0", - "tap": "^5.7.2" + "tap": "^7.1.0" }, "directories": {}, "dist": { - "shasum": "3c0543b65d7b4fbc60b6cd94593d9bf436739be8", - "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.0.tgz" + "shasum": "5a4c884992b63a7acd9badb7894c3ee9cfccad81", + "tarball": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.1.tgz" }, "files": [ "index.js", "signals.js" ], - "gitHead": "2bbec4e5d9f9cf1f7529b1c923d1b058e69ccf7f", + "gitHead": "6859aff54f5198c63fff91baef279b86026bde69", "homepage": "https://github.com/tapjs/signal-exit", "keywords": [ "signal", @@ -80,12 +93,12 @@ "main": "index.js", "maintainers": [ { - "email": "ben@npmjs.com", - "name": "bcoe" + "name": "bcoe", + "email": "ben@npmjs.com" }, { - "email": "isaacs@npmjs.com", - "name": "isaacs" + "name": "isaacs", + "email": "isaacs@npmjs.com" } ], "name": "signal-exit", @@ -101,5 +114,5 @@ "release": "standard-version", "test": "tap --timeout=240 ./test/*.js --cov" }, - "version": "3.0.0" + "version": "3.0.1" } diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js index bc6f97ee606954..3bd67a8a554e30 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/signal-exit/signals.js @@ -13,15 +13,16 @@ // fatal signal like SIGWINCH or something, and then // exit, it'll end up firing `process.emit('exit')`, so // the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. module.exports = [ 'SIGABRT', 'SIGALRM', - 'SIGBUS', - 'SIGFPE', 'SIGHUP', - 'SIGILL', 'SIGINT', - 'SIGSEGV', 'SIGTERM' ] diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/index.js index 0335117977237f..0432fe6a30af45 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/index.js +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/index.js @@ -1,6 +1,5 @@ +/* eslint-disable babel/new-cap, xo/throw-new-error */ 'use strict'; -var numberIsNan = require('number-is-nan'); - module.exports = function (str, pos) { if (str === null || str === undefined) { throw TypeError(); @@ -11,7 +10,7 @@ module.exports = function (str, pos) { var size = str.length; var i = pos ? 
Number(pos) : 0; - if (numberIsNan(i)) { + if (Number.isNaN(i)) { i = 0; } @@ -25,7 +24,7 @@ module.exports = function (str, pos) { var second = str.charCodeAt(i + 1); if (second >= 0xDC00 && second <= 0xDFFF) { - return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + return ((first - 0xD800) * 0x400) + second - 0xDC00 + 0x10000; } } diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/index.js deleted file mode 100644 index 79be4b9cb8c3bc..00000000000000 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/index.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; -module.exports = Number.isNaN || function (x) { - return x !== x; -}; diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json deleted file mode 100644 index 018bcb37151877..00000000000000 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/package.json +++ /dev/null @@ -1,93 +0,0 @@ -{ - "_args": [ - [ - "number-is-nan@^1.0.0", - "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" - ] - ], - "_from": "number-is-nan@>=1.0.0 <2.0.0", - "_id": "number-is-nan@1.0.0", - "_inCache": true, - "_installable": true, - "_location": "/npmlog/gauge/string-width/code-point-at/number-is-nan", - "_nodeVersion": "0.12.3", - "_npmUser": { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" - }, - "_npmVersion": "2.10.0", - "_phantomChildren": {}, - "_requested": { - "name": "number-is-nan", - "raw": "number-is-nan@^1.0.0", - "rawSpec": "^1.0.0", - "scope": null, - "spec": ">=1.0.0 <2.0.0", - "type": "range" - }, - "_requiredBy": [ - "/npmlog/gauge/string-width/code-point-at" - ], - "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz", - "_shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", - "_shrinkwrap": null, - "_spec": "number-is-nan@^1.0.0", - "_where": "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at", - "author": { - "email": "sindresorhus@gmail.com", - "name": "Sindre Sorhus", - "url": "sindresorhus.com" - }, - "bugs": { - "url": "https://github.com/sindresorhus/number-is-nan/issues" - }, - "dependencies": {}, - "description": "ES6 Number.isNaN() ponyfill", - "devDependencies": { - "ava": "0.0.4" - }, - "directories": {}, - "dist": { - "shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", - "tarball": "http://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz" - }, - "engines": { - "node": ">=0.10.0" - }, - "files": [ - "index.js" - ], - "gitHead": "0f394b1bc33185c40304363b209e3f0588dbeeb3", - "homepage": "https://github.com/sindresorhus/number-is-nan#readme", - "keywords": [ - "es6", - "es2015", - "ecmascript", - "harmony", - "ponyfill", - "polyfill", - "shim", - "number", - "is", - "nan", - "not" - ], - "license": "MIT", - "maintainers": [ - { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" - 
} - ], - "name": "number-is-nan", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/sindresorhus/number-is-nan.git" - }, - "scripts": { - "test": "node test.js" - }, - "version": "1.0.0" -} diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md deleted file mode 100644 index 93d851a14f1ac5..00000000000000 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/node_modules/number-is-nan/readme.md +++ /dev/null @@ -1,30 +0,0 @@ -# number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) - -> ES6 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native method - - -## Install - -``` -$ npm install --save number-is-nan -``` - - -## Usage - -```js -var numberIsNan = require('number-is-nan'); - -numberIsNan(NaN); -//=> true - -numberIsNan('unicorn'); -//=> false -``` - - -## License - -MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json index 4c1430d8e52ade..dd1f05b6a16f9e 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/package.json @@ -1,57 +1,68 @@ { "_args": [ [ - "code-point-at@^1.0.0", - "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width" + { + "raw": "code-point-at@^1.0.0", + "scope": null, + "escapedName": "code-point-at", + "name": "code-point-at", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/rebecca/code/npm-latest/node_modules/npmlog/node_modules/gauge/node_modules/string-width" ] ], "_from": "code-point-at@>=1.0.0 <2.0.0", - "_id": "code-point-at@1.0.0", + "_id": "code-point-at@1.1.0", "_inCache": true, - "_installable": true, "_location": "/npmlog/gauge/string-width/code-point-at", - "_nodeVersion": "0.12.5", + "_nodeVersion": "4.6.1", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/code-point-at-1.1.0.tgz_1478169780337_0.8445875702891499" + }, "_npmUser": { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" }, - "_npmVersion": "2.11.2", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { - "name": "code-point-at", "raw": "code-point-at@^1.0.0", - "rawSpec": "^1.0.0", "scope": null, + "escapedName": "code-point-at", + "name": "code-point-at", + "rawSpec": "^1.0.0", "spec": ">=1.0.0 <2.0.0", "type": "range" }, "_requiredBy": [ "/npmlog/gauge/string-width" ], - "_resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.0.0.tgz", - "_shasum": "f69b192d3f7d91e382e4b71bddb77878619ab0c6", + "_resolved": 
"https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "_shasum": "0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77", "_shrinkwrap": null, "_spec": "code-point-at@^1.0.0", - "_where": "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width", + "_where": "/Users/rebecca/code/npm-latest/node_modules/npmlog/node_modules/gauge/node_modules/string-width", "author": { - "email": "sindresorhus@gmail.com", "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", "url": "sindresorhus.com" }, "bugs": { "url": "https://github.com/sindresorhus/code-point-at/issues" }, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "description": "ES2015 String#codePointAt() ponyfill", + "dependencies": {}, + "description": "ES2015 `String#codePointAt()` ponyfill", "devDependencies": { - "ava": "0.0.4" + "ava": "*", + "xo": "^0.16.0" }, "directories": {}, "dist": { - "shasum": "f69b192d3f7d91e382e4b71bddb77878619ab0c6", - "tarball": "http://registry.npmjs.org/code-point-at/-/code-point-at-1.0.0.tgz" + "shasum": "0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77", + "tarball": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz" }, "engines": { "node": ">=0.10.0" @@ -59,11 +70,10 @@ "files": [ "index.js" ], - "gitHead": "c2ffa4064718b37c84c73a633abeeed5b486a469", - "homepage": "https://github.com/sindresorhus/code-point-at", + "gitHead": "f8f21c8df2d40248fef1b36ca9076e59c0c34791", + "homepage": "https://github.com/sindresorhus/code-point-at#readme", "keywords": [ "es2015", - "es6", "ponyfill", "polyfill", "shim", @@ -78,8 +88,8 @@ "license": "MIT", "maintainers": [ { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" } ], "name": "code-point-at", @@ -90,7 +100,7 @@ "url": "git+https://github.com/sindresorhus/code-point-at.git" }, "scripts": { - "test": "node test.js" + "test": "xo && ava" }, - "version": "1.0.0" + "version": "1.1.0" } diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md index 71e7d0931b8b0c..4c97730e69e6f4 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at/readme.md @@ -1,8 +1,6 @@ # code-point-at [![Build Status](https://travis-ci.org/sindresorhus/code-point-at.svg?branch=master)](https://travis-ci.org/sindresorhus/code-point-at) -> ES2015 [`String#codePointAt()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native method +> ES2015 [`String#codePointAt()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/codePointAt) [ponyfill](https://ponyfill.com) ## Install @@ -31,4 +29,4 @@ codePointAt('abc', 2); ## License -MIT © [Sindre Sorhus](http://sindresorhus.com) +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json index 
287e697c31ad34..5430e54f9261ce 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/package.json @@ -1,59 +1,67 @@ { "_args": [ [ - "number-is-nan@^1.0.0", - "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/code-point-at" - ], - [ - "number-is-nan@^1.0.0", - "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point" + { + "raw": "number-is-nan@^1.0.0", + "scope": null, + "escapedName": "number-is-nan", + "name": "number-is-nan", + "rawSpec": "^1.0.0", + "spec": ">=1.0.0 <2.0.0", + "type": "range" + }, + "/Users/rebecca/code/npm-latest/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point" ] ], - "_from": "number-is-nan@^1.0.0", - "_id": "number-is-nan@1.0.0", + "_from": "number-is-nan@>=1.0.0 <2.0.0", + "_id": "number-is-nan@1.0.1", "_inCache": true, - "_installable": true, "_location": "/npmlog/gauge/string-width/is-fullwidth-code-point/number-is-nan", - "_nodeVersion": "0.12.3", + "_nodeVersion": "4.5.0", + "_npmOperationalInternal": { + "host": "packages-12-west.internal.npmjs.com", + "tmp": "tmp/number-is-nan-1.0.1.tgz_1475212313367_0.9480371843092144" + }, "_npmUser": { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" }, - "_npmVersion": "2.10.0", + "_npmVersion": "2.15.9", "_phantomChildren": {}, "_requested": { - "name": "number-is-nan", "raw": "number-is-nan@^1.0.0", - "rawSpec": "^1.0.0", "scope": null, + "escapedName": "number-is-nan", + "name": "number-is-nan", + "rawSpec": "^1.0.0", "spec": ">=1.0.0 <2.0.0", "type": "range" }, "_requiredBy": [ "/npmlog/gauge/string-width/is-fullwidth-code-point" ], - "_resolved": "http://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz", - "_shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", + "_resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "_shasum": "097b602b53422a522c1afb8790318336941a011d", "_shrinkwrap": null, "_spec": "number-is-nan@^1.0.0", - "_where": "/Users/rebecca/code/npm-with-new-gauge/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point", + "_where": "/Users/rebecca/code/npm-latest/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point", "author": { - "email": "sindresorhus@gmail.com", "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", "url": "sindresorhus.com" }, "bugs": { "url": "https://github.com/sindresorhus/number-is-nan/issues" }, "dependencies": {}, - "description": "ES6 Number.isNaN() ponyfill", + "description": "ES2015 Number.isNaN() ponyfill", "devDependencies": { - "ava": "0.0.4" + "ava": "*" }, "directories": {}, "dist": { - "shasum": "c020f529c5282adfdd233d91d4b181c3d686dc4b", - "tarball": "http://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.0.tgz" + "shasum": "097b602b53422a522c1afb8790318336941a011d", + "tarball": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz" }, "engines": { "node": ">=0.10.0" @@ -61,13 +69,11 @@ "files": [ "index.js" ], - "gitHead": "0f394b1bc33185c40304363b209e3f0588dbeeb3", + "gitHead": 
"ed9cdac3f428cc929b61bb230da42c87477af4b9", "homepage": "https://github.com/sindresorhus/number-is-nan#readme", "keywords": [ - "es6", "es2015", "ecmascript", - "harmony", "ponyfill", "polyfill", "shim", @@ -79,8 +85,8 @@ "license": "MIT", "maintainers": [ { - "email": "sindresorhus@gmail.com", - "name": "sindresorhus" + "name": "sindresorhus", + "email": "sindresorhus@gmail.com" } ], "name": "number-is-nan", @@ -91,7 +97,7 @@ "url": "git+https://github.com/sindresorhus/number-is-nan.git" }, "scripts": { - "test": "node test.js" + "test": "ava" }, - "version": "1.0.0" + "version": "1.0.1" } diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md index 93d851a14f1ac5..24635087120128 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/string-width/node_modules/is-fullwidth-code-point/node_modules/number-is-nan/readme.md @@ -1,8 +1,6 @@ # number-is-nan [![Build Status](https://travis-ci.org/sindresorhus/number-is-nan.svg?branch=master)](https://travis-ci.org/sindresorhus/number-is-nan) -> ES6 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) ponyfill - -> Ponyfill: A polyfill that doesn't overwrite the native method +> ES2015 [`Number.isNaN()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN) [ponyfill](https://ponyfill.com) ## Install diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/package.json index 846a27e8a44d85..9ab6582b5072c9 100644 --- a/deps/npm/node_modules/npmlog/node_modules/gauge/package.json +++ b/deps/npm/node_modules/npmlog/node_modules/gauge/package.json @@ -2,53 +2,54 @@ "_args": [ [ { - "name": "gauge", - "raw": "gauge@~2.6.0", - "rawSpec": "~2.6.0", + "raw": "gauge@~2.7.1", "scope": null, - "spec": ">=2.6.0 <2.7.0", + "escapedName": "gauge", + "name": "gauge", + "rawSpec": "~2.7.1", + "spec": ">=2.7.1 <2.8.0", "type": "range" }, - "/Users/rebecca/code/npm/node_modules/npmlog" + "/Users/rebecca/code/npm-latest/node_modules/npmlog" ] ], - "_from": "gauge@>=2.6.0 <2.7.0", - "_id": "gauge@2.6.0", + "_from": "gauge@>=2.7.1 <2.8.0", + "_id": "gauge@2.7.1", "_inCache": true, - "_installable": true, "_location": "/npmlog/gauge", - "_nodeVersion": "4.4.0", + "_nodeVersion": "6.9.1", "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/gauge-2.6.0.tgz_1466067371972_0.20705468393862247" + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/gauge-2.7.1.tgz_1478210591065_0.5937802786938846" }, "_npmUser": { - "email": "me@re-becca.org", - "name": "iarna" + "name": "iarna", + "email": "me@re-becca.org" }, - "_npmVersion": "3.9.2", + "_npmVersion": "4.0.0", "_phantomChildren": { "strip-ansi": "3.0.1" }, "_requested": { - "name": "gauge", - "raw": "gauge@~2.6.0", - "rawSpec": "~2.6.0", + "raw": "gauge@~2.7.1", "scope": null, - "spec": ">=2.6.0 <2.7.0", + "escapedName": "gauge", + "name": "gauge", + "rawSpec": "~2.7.1", + "spec": ">=2.7.1 <2.8.0", "type": "range" }, "_requiredBy": [ "/npmlog" ], - "_resolved": 
"https://registry.npmjs.org/gauge/-/gauge-2.6.0.tgz", - "_shasum": "d35301ad18e96902b4751dcbbe40f4218b942a46", + "_resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.1.tgz", + "_shasum": "388473894fe8be5e13ffcdb8b93e4ed0616428c7", "_shrinkwrap": null, - "_spec": "gauge@~2.6.0", - "_where": "/Users/rebecca/code/npm/node_modules/npmlog", + "_spec": "gauge@~2.7.1", + "_where": "/Users/rebecca/code/npm-latest/node_modules/npmlog", "author": { - "email": "me@re-becca.org", - "name": "Rebecca Turner" + "name": "Rebecca Turner", + "email": "me@re-becca.org" }, "bugs": { "url": "https://github.com/iarna/gauge/issues" @@ -74,8 +75,8 @@ }, "directories": {}, "dist": { - "shasum": "d35301ad18e96902b4751dcbbe40f4218b942a46", - "tarball": "https://registry.npmjs.org/gauge/-/gauge-2.6.0.tgz" + "shasum": "388473894fe8be5e13ffcdb8b93e4ed0616428c7", + "tarball": "https://registry.npmjs.org/gauge/-/gauge-2.7.1.tgz" }, "files": [ "base-theme.js", @@ -98,7 +99,7 @@ "themes.js", "wide-truncate.js" ], - "gitHead": "d51040a71c269432c16cc542143f403a831630e6", + "gitHead": "d7ac37af0a44af2315656fb73f76f6bca03d084e", "homepage": "https://github.com/iarna/gauge", "keywords": [ "progressbar", @@ -109,8 +110,8 @@ "main": "index.js", "maintainers": [ { - "email": "me@re-becca.org", - "name": "iarna" + "name": "iarna", + "email": "me@re-becca.org" } ], "name": "gauge", @@ -121,7 +122,8 @@ "url": "git+https://github.com/iarna/gauge.git" }, "scripts": { + "prepublish": "rm -f *~", "test": "standard && tap test/*.js --coverage" }, - "version": "2.6.0" + "version": "2.7.1" } diff --git a/deps/npm/node_modules/npmlog/package.json b/deps/npm/node_modules/npmlog/package.json index 3f314d7916cf57..1a2fd8b0243e68 100644 --- a/deps/npm/node_modules/npmlog/package.json +++ b/deps/npm/node_modules/npmlog/package.json @@ -2,55 +2,55 @@ "_args": [ [ { - "raw": "npmlog@4.0.0", + "raw": "npmlog@4.0.1", "scope": null, "escapedName": "npmlog", "name": "npmlog", - "rawSpec": "4.0.0", - "spec": "4.0.0", + "rawSpec": "4.0.1", + "spec": "4.0.1", "type": "version" }, - "/Users/zkat/Documents/code/npm" + "/Users/rebecca/code/npm-latest" ] ], - "_from": "npmlog@4.0.0", - "_id": "npmlog@4.0.0", + "_from": "npmlog@4.0.1", + "_id": "npmlog@4.0.1", "_inCache": true, "_location": "/npmlog", - "_nodeVersion": "5.10.1", + "_nodeVersion": "7.1.0", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/npmlog-4.0.0.tgz_1468888150556_0.3835553650278598" + "tmp": "tmp/npmlog-4.0.1.tgz_1479345245313_0.32757814647629857" }, "_npmUser": { - "name": "zkat", - "email": "kat@sykosomatic.org" + "name": "iarna", + "email": "me@re-becca.org" }, - "_npmVersion": "3.10.4", + "_npmVersion": "3.10.10", "_phantomChildren": { "aproba": "1.0.4", "has-unicode": "2.0.1", - "readable-stream": "2.1.4", + "readable-stream": "2.2.2", "strip-ansi": "3.0.1" }, "_requested": { - "raw": "npmlog@4.0.0", + "raw": "npmlog@4.0.1", "scope": null, "escapedName": "npmlog", "name": "npmlog", - "rawSpec": "4.0.0", - "spec": "4.0.0", + "rawSpec": "4.0.1", + "spec": "4.0.1", "type": "version" }, "_requiredBy": [ "#USER", "/" ], - "_resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.0.0.tgz", - "_shasum": "e094503961c70c1774eb76692080e8d578a9f88f", + "_resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.0.1.tgz", + "_shasum": "d14f503b4cd79710375553004ba96e6662fbc0b8", "_shrinkwrap": null, - "_spec": "npmlog@4.0.0", - "_where": "/Users/zkat/Documents/code/npm", + "_spec": "npmlog@4.0.1", + "_where": "/Users/rebecca/code/npm-latest", 
"author": { "name": "Isaac Z. Schlueter", "email": "i@izs.me", @@ -62,7 +62,7 @@ "dependencies": { "are-we-there-yet": "~1.1.2", "console-control-strings": "~1.1.0", - "gauge": "~2.6.0", + "gauge": "~2.7.1", "set-blocking": "~2.0.0" }, "description": "logger for npm", @@ -72,13 +72,13 @@ }, "directories": {}, "dist": { - "shasum": "e094503961c70c1774eb76692080e8d578a9f88f", - "tarball": "https://registry.npmjs.org/npmlog/-/npmlog-4.0.0.tgz" + "shasum": "d14f503b4cd79710375553004ba96e6662fbc0b8", + "tarball": "https://registry.npmjs.org/npmlog/-/npmlog-4.0.1.tgz" }, "files": [ "log.js" ], - "gitHead": "3ca8823fdfa66f54c72adde3fd2c4e0237e6302b", + "gitHead": "c027c276f6f7e6c8d808767b0d611555e3ef5f61", "homepage": "https://github.com/npm/npmlog#readme", "license": "ISC", "main": "log.js", @@ -110,5 +110,5 @@ "scripts": { "test": "standard && tap test/*.js" }, - "version": "4.0.0" + "version": "4.0.1" } diff --git a/deps/npm/node_modules/readable-stream/.npmignore b/deps/npm/node_modules/readable-stream/.npmignore index 265ff739e071cd..6d270c6ccb3064 100644 --- a/deps/npm/node_modules/readable-stream/.npmignore +++ b/deps/npm/node_modules/readable-stream/.npmignore @@ -6,3 +6,4 @@ zlib.js .zuul.yml .nyc_output coverage +docs/ diff --git a/deps/npm/node_modules/readable-stream/README.md b/deps/npm/node_modules/readable-stream/README.md index 9fb4feaaa12419..9be2adb1582512 100644 --- a/deps/npm/node_modules/readable-stream/README.md +++ b/deps/npm/node_modules/readable-stream/README.md @@ -1,6 +1,6 @@ # readable-stream -***Node-core v6.3.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) +***Node-core v7.0.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) [![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) @@ -16,7 +16,9 @@ npm install --save readable-stream ***Node-core streams for userland*** This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.md). +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v7.1.0/docs/api/). If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). @@ -34,3 +36,5 @@ As of version 2.0.0 **readable-stream** uses semantic versioning. * **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> * **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> * **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E diff --git a/deps/npm/node_modules/readable-stream/doc/stream.md b/deps/npm/node_modules/readable-stream/doc/stream.md deleted file mode 100644 index fc269c8e38f829..00000000000000 --- a/deps/npm/node_modules/readable-stream/doc/stream.md +++ /dev/null @@ -1,2015 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface for working with streaming data in Node.js. 
-The `stream` module provides a base API that makes it easy to build objects -that implement the stream interface. - -There are many stream objects provided by Node.js. For instance, a -[request to an HTTP server][http-incoming-message] and [`process.stdout`][] -are both stream instances. - -Streams can be readable, writable, or both. All streams are instances of -[`EventEmitter`][]. - -The `stream` module can be accessed using: - -```js -const stream = require('stream'); -``` - -While it is important for all Node.js users to understand how streams works, -the `stream` module itself is most useful for developer's that are creating new -types of stream instances. Developer's who are primarily *consuming* stream -objects will rarely (if ever) have need to use the `stream` module directly. - -## Organization of this document - -This document is divided into two primary sections and third section for -additional notes. The first section explains the elements of the stream API that -are required to *use* streams within an application. The second section explains -the elements of the API that are required to *implement* new types of streams. - -## Types of Streams - -There are four fundamental stream types within Node.js: - -* [Readable][] - streams from which data can be read (for example - [`fs.createReadStream()`][]). -* [Writable][] - streams to which data can be written (for example - [`fs.createWriteStream()`][]). -* [Duplex][] - streams that are both Readable and Writable (for example - [`net.Socket`][]). -* [Transform][] - Duplex streams that can modify or transform the data as it - is written and read (for example [`zlib.createDeflate()`][]). - -### Object Mode - -All streams created by Node.js APIs operate exclusively on strings and `Buffer` -objects. It is possible, however, for stream implementations to work with other -types of JavaScript values (with the exception of `null` which serves a special -purpose within streams). Such streams are considered to operate in "object -mode". - -Stream instances are switched into object mode using the `objectMode` option -when the stream is created. Attempting to switch an existing stream into -object mode is not safe. - -### Buffering - - - -Both [Writable][] and [Readable][] streams will store data in an internal -buffer that can be retrieved using `writable._writableState.getBuffer()` or -`readable._readableState.buffer`, respectively. - -The amount of data potentially buffered depends on the `highWaterMark` option -passed into the streams constructor. For normal streams, the `highWaterMark` -option specifies a total number of bytes. For streams operating in object mode, -the `highWaterMark` specifies a total number of objects. - -Data is buffered in Readable streams when the implementation calls -[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not -call [`stream.read()`][stream-read], the data will sit in the internal -queue until it is consumed. - -Once the total size of the internal read buffer reaches the threshold specified -by `highWaterMark`, the stream will temporarily stop reading data from the -underlying resource until the data currently buffered can be consumed (that is, -the stream will stop calling the internal `readable._read()` method that is -used to fill the read buffer). - -Data is buffered in Writable streams when the -[`writable.write(chunk)`][stream-write] method is called repeatedly. 
While the -total size of the internal write buffer is below the threshold set by -`highWaterMark`, calls to `writable.write()` will return `true`. Once the -the size of the internal buffer reaches or exceeds the `highWaterMark`, `false` -will be returned. - -A key goal of the `stream` API, and in particular the [`stream.pipe()`] method, -is to limit the buffering of data to acceptable levels such that sources and -destinations of differing speeds will not overwhelm the available memory. - -Because [Duplex][] and [Transform][] streams are both Readable and Writable, -each maintain *two* separate internal buffers used for reading and writing, -allowing each side to operate independently of the other while maintaining an -appropriate and efficient flow of data. For example, [`net.Socket`][] instances -are [Duplex][] streams whose Readable side allows consumption of data received -*from* the socket and whose Writable side allows writing data *to* the socket. -Because data may be written to the socket at a faster or slower rate than data -is received, it is important each side operate (and buffer) independently of -the other. - -## API for Stream Consumers - - - -Almost all Node.js applications, no matter how simple, use streams in some -manner. The following is an example of using streams in a Node.js application -that implements an HTTP server: - -```js -const http = require('http'); - -const server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - let body = ''; - // Get the data as utf8 strings. - // If an encoding is not set, Buffer objects will be received. - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event indicates that the entire body has been received - req.on('end', () => { - try { - const data = JSON.parse(body); - } catch (er) { - // uh oh! bad json! - res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -[Writable][] streams (such as `res` in the example) expose methods such as -`write()` and `end()` that are used to write data onto the stream. - -[Readable][] streams use the [`EventEmitter`][] API for notifying application -code when data is available to be read off the stream. That available data can -be read from the stream in multiple ways. - -Both [Writable][] and [Readable][] streams use the [`EventEmitter`][] API in -various ways to communicate the current state of the stream. - -[Duplex][] and [Transform][] streams are both [Writable][] and [Readable][]. - -Applications that are either writing data to or consuming data from a stream -are not required to implement the stream interfaces directly and will generally -have no reason to call `require('stream')`. - -Developers wishing to implement new types of streams should refer to the -section [API for Stream Implementers][]. - -### Writable Streams - -Writable streams are an abstraction for a *destination* to which data is -written. 
- -Examples of [Writable][] streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -*Note*: Some of these examples are actually [Duplex][] streams that implement -the [Writable][] interface. - -All [Writable][] streams implement the interface defined by the -`stream.Writable` class. - -While specific instances of [Writable][] streams may differ in various ways, -all Writable streams follow the same fundamental usage pattern as illustrated -in the example below: - -```js -const myStream = getWritableStreamSomehow(); -myStream.write('some data'); -myStream.write('some more data'); -myStream.end('done writing data'); -``` - -#### Class: stream.Writable - - - - -##### Event: 'close' - - -The `'close'` event is emitted when the stream and any of its underlying -resources (a file descriptor, for example) have been closed. The event indicates -that no more events will be emitted, and no further computation will occur. - -Not all Writable streams will emit the `'close'` event. - -##### Event: 'drain' - - -If a call to [`stream.write(chunk)`][stream-write] returns `false`, the -`'drain'` event will be emitted when it is appropriate to resume writing data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. -function writeOneMillionTimes(writer, data, encoding, callback) { - let i = 1000000; - write(); - function write() { - var ok = true; - do { - i--; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! - // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -##### Event: 'error' - - -* {Error} - -The `'error'` event is emitted if an error occurred while writing or piping -data. The listener callback is passed a single `Error` argument when called. - -*Note*: The stream is not closed when the `'error'` event is emitted. - -##### Event: 'finish' - - -The `'finish'` event is emitted after the [`stream.end()`][stream-end] method -has been called, and all data has been flushed to the underlying system. - -```js -const writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i ++) { - writer.write('hello, #${i}!\n'); -} -writer.end('This is the end\n'); -writer.on('finish', () => { - console.error('All writes are now complete.'); -}); -``` - -##### Event: 'pipe' - - -* `src` {stream.Readable} source stream that is piping to this writable - -The `'pipe'` event is emitted when the [`stream.pipe()`][] method is called on -a readable stream, adding this writable to its set of destinations. - -```js -const writer = getWritableStreamSomehow(); -const reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -##### Event: 'unpipe' - - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called -on a [Readable][] stream, removing this [Writable][] from its set of -destinations. 
- -```js -const writer = getWritableStreamSomehow(); -const reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('Something has stopped piping into the writer.'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -##### writable.cork() - - -The `writable.cork()` method forces all written data to be buffered in memory. -The buffered data will be flushed when either the [`stream.uncork()`][] or -[`stream.end()`][stream-end] methods are called. - -The primary intent of `writable.cork()` is to avoid a situation where writing -many small chunks of data to a stream do not cause an backup in the internal -buffer that would have an adverse impact on performance. In such situations, -implementations that implement the `writable._writev()` method can perform -buffered writes in a more optimized manner. - -##### writable.end([chunk][, encoding][, callback]) - - -* `chunk` {String|Buffer|any} Optional data to write. For streams not operating - in object mode, `chunk` must be a string or a `Buffer`. For object mode - streams, `chunk` may be any JavaScript value other than `null`. -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Calling the `writable.end()` method signals that no more data will be written -to the [Writable][]. The optional `chunk` and `encoding` arguments allow one -final additional chunk of data to be written immediately before closing the -stream. If provided, the optional `callback` function is attached as a listener -for the [`'finish'`][] event. - -Calling the [`stream.write()`][stream-write] method after calling -[`stream.end()`][stream-end] will raise an error. - -```js -// write 'hello, ' and then end with 'world!' -const file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! -``` - -##### writable.setDefaultEncoding(encoding) - - -* `encoding` {String} The new default encoding -* Return: `this` - -The `writable.setDefaultEncoding()` method sets the default `encoding` for a -[Writable][] stream. - -##### writable.uncork() - - -The `writable.uncork()` method flushes all data buffered since -[`stream.cork()`][] was called. - -When using `writable.cork()` and `writable.uncork()` to manage the buffering -of writes to a stream, it is recommended that calls to `writable.uncork()` be -deferred using `process.nextTick()`. Doing so allows batching of all -`writable.write()` calls that occur within a given Node.js event loop phase. - -```js -stream.cork(); -stream.write('some '); -stream.write('data '); -process.nextTick(() => stream.uncork()); -``` - -If the `writable.cork()` method is called multiple times on a stream, the same -number of calls to `writable.uncork()` must be called to flush the buffered -data. - -``` -stream.cork(); -stream.write('some '); -stream.cork(); -stream.write('data '); -process.nextTick(() => { - stream.uncork(); - // The data will not be flushed until uncork() is called a second time. - stream.uncork(); -}); -``` - -##### writable.write(chunk[, encoding][, callback]) - - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `false` if the stream wishes for the calling code to - wait for the `'drain'` event to be emitted before continuing to write - additional data; otherwise `true`. 
- -The `writable.write()` method writes some data to the stream, and calls the -supplied `callback` once the data has been fully handled. If an error -occurs, the `callback` *may or may not* be called with the error as its -first argument. To reliably detect write errors, add a listener for the -`'error'` event. - -The return value indicates whether the written `chunk` was buffered internally -and the buffer has exceeded the `highWaterMark` configured when the stream was -created. If `false` is returned, further attempts to write data to the stream -should be paused until the `'drain'` event is emitted. - -A Writable stream in object mode will always ignore the `encoding` argument. - -### Readable Streams - -Readable streams are an abstraction for a *source* from which data is -consumed. - -Examples of Readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -All [Readable][] streams implement the interface defined by the -`stream.Readable` class. - -#### Two Modes - -Readable streams effectively operate in one of two modes: flowing and paused. - -When in flowing mode, data is read from the underlying system automatically -and provided to an application as quickly as possible using events via the -[`EventEmitter`][] interface. - -In paused mode, the [`stream.read()`][stream-read] method must be called -explicitly to read chunks of data from the stream. - -All [Readable][] streams begin in paused mode but can be switched to flowing -mode in one of the following ways: - -* Adding a [`'data'`][] event handler. -* Calling the [`stream.resume()`][stream-resume] method. -* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. - -The Readable can switch back to paused mode using one of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -The important concept to remember is that a Readable will not generate data -until a mechanism for either consuming or ignoring that data is provided. If -the consuming mechanism is disabled or taken away, the Readable will *attempt* -to stop generating the data. - -*Note*: For backwards compatibility reasons, removing [`'data'`][] event -handlers will **not** automatically pause the stream. Also, if there are piped -destinations, then calling [`stream.pause()`][stream-pause] will not guarantee -that the stream will *remain* paused once those destinations drain and ask for -more data. - -*Note*: If a [Readable][] is switched into flowing mode and there are no -consumers available handle the data, that data will be lost. This can occur, -for instance, when the `readable.resume()` method is called without a listener -attached to the `'data'` event, or when a `'data'` event handler is removed -from the stream. - -#### Three States - -The "two modes" of operation for a Readable stream are a simplified abstraction -for the more complicated internal state management that is happening within the -Readable stream implementation. 
- -Specifically, at any given point in time, every Readable is in one of three -possible states: - -* `readable._readableState.flowing = null` -* `readable._readableState.flowing = false` -* `readable._readableState.flowing = true` - -When `readable._readableState.flowing` is `null`, no mechanism for consuming the -streams data is provided so the stream will not generate its data. - -Attaching a listener for the `'data'` event, calling the `readable.pipe()` -method, or calling the `readable.resume()` method will switch -`readable._readableState.flowing` to `true`, causing the Readable to begin -actively emitting events as data is generated. - -Calling `readable.pause()`, `readable.unpipe()`, or receiving "back pressure" -will cause the `readable._readableState.flowing` to be set as `false`, -temporarily halting the flowing of events but *not* halting the generation of -data. - -While `readable._readableState.flowing` is `false`, data may be accumulating -within the streams internal buffer. - -#### Choose One - -The Readable stream API evolved across multiple Node.js versions and provides -multiple methods of consuming stream data. In general, developers should choose -*one* of the methods of consuming data and *should never* use multiple methods -to consume data from a single stream. - -Use of the `readable.pipe()` method is recommended for most users as it has been -implemented to provide the easiest way of consuming stream data. Developers that -require more fine-grained control over the transfer and generation of data can -use the [`EventEmitter`][] and `readable.pause()`/`readable.resume()` APIs. - -#### Class: stream.Readable - - - - -##### Event: 'close' - - -The `'close'` event is emitted when the stream and any of its underlying -resources (a file descriptor, for example) have been closed. The event indicates -that no more events will be emitted, and no further computation will occur. - -Not all [Readable][] streams will emit the `'close'` event. - -##### Event: 'data' - - -* `chunk` {Buffer|String|any} The chunk of data. For streams that are not - operating in object mode, the chunk will be either a string or `Buffer`. - For streams that are in object mode, the chunk can be any JavaScript value - other than `null`. - -The `'data'` event is emitted whenever the stream is relinquishing ownership of -a chunk of data to a consumer. This may occur whenever the stream is switched -in flowing mode by calling `readable.pipe()`, `readable.resume()`, or by -attaching a listener callback to the `'data'` event. The `'data'` event will -also be emitted whenever the `readable.read()` method is called and a chunk of -data is available to be returned. - -Attaching a `'data'` event listener to a stream that has not been explicitly -paused will switch the stream into flowing mode. Data will then be passed as -soon as it is available. - -The listener callback will be passed the chunk of data as a string if a default -encoding has been specified for the stream using the -`readable.setEncoding()` method; otherwise the data will be passed as a -`Buffer`. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); -}); -``` - -##### Event: 'end' - - -The `'end'` event is emitted when there is no more data to be consumed from -the stream. - -*Note*: The `'end'` event **will not be emitted** unless the data is -completely consumed. 
This can be accomplished by switching the stream into -flowing mode, or by calling [`stream.read()`][stream-read] repeatedly until -all data has been consumed. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); -}); -readable.on('end', () => { - console.log('There will be no more data.'); -}); -``` - -##### Event: 'error' - - -* {Error} - -The `'error'` event may be emitted by a Readable implementation at any time. -Typically, this may occur if the underlying stream in unable to generate data -due to an underlying internal failure, or when a stream implementation attempts -to push an invalid chunk of data. - -The listener callback will be passed a single `Error` object. - -##### Event: 'readable' - - -The `'readable'` event is emitted when there is data available to be read from -the stream. In some cases, attaching a listener for the `'readable'` event will -cause some amount of data to be read into an internal buffer. - -```javascript -const readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` -The `'readable'` event will also be emitted once the end of the stream data -has been reached but before the `'end'` event is emitted. - -Effectively, the `'readable'` event indicates that the stream has new -information: either new data is available or the end of the stream has been -reached. In the former case, [`stream.read()`][stream-read] will return the -available data. In the latter case, [`stream.read()`][stream-read] will return -`null`. For instance, in the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -const rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -*Note*: In general, the `readable.pipe()` and `'data'` event mechanisms are -preferred over the use of the `'readable'` event. - -##### readable.isPaused() - - -* Return: {Boolean} - -The `readable.isPaused()` method returns the current operating state of the -Readable. This is used primarily by the mechanism that underlies the -`readable.pipe()` method. In most typical cases, there will be no reason to -use this method directly. - -```js -const readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -##### readable.pause() - - -* Return: `this` - -The `readable.pause()` method will cause a stream in flowing mode to stop -emitting [`'data'`][] events, switching out of flowing mode. Any data that -becomes available will remain in the internal buffer. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); - readable.pause(); - console.log('There will be no additional data for 1 second.'); - setTimeout(() => { - console.log('Now data will start flowing again.'); - readable.resume(); - }, 1000); -}); -``` - -##### readable.pipe(destination[, options]) - - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Defaults to `true`. 
- -The `readable.pipe()` method attaches a [Writable][] stream to the `readable`, -causing it to switch automatically into flowing mode and push all of its data -to the attached [Writable][]. The flow of data will be automatically managed so -that the destination Writable stream is not overwhelmed by a faster Readable -stream. - -The following example pipes all of the data from the `readable` into a file -named `file.txt`: - -```js -const readable = getReadableStreamSomehow(); -const writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` -It is possible to attach multiple Writable streams to a single Readable stream. - -The `readable.pipe()` method returns a reference to the *destination* stream -making it possible to set up chains of piped streams: - -```js -const r = fs.createReadStream('file.txt'); -const z = zlib.createGzip(); -const w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -By default, [`stream.end()`][stream-end] is called on the destination Writable -stream when the source Readable stream emits [`'end'`][], so that the -destination is no longer writable. To disable this default behavior, the `end` -option can be passed as `false`, causing the destination stream to remain open, -as illustrated in the following example: - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -One important caveat is that if the Readable stream emits an error during -processing, the Writable destination *is not closed* automatically. If an -error occurs, it will be necessary to *manually* close each stream in order -to prevent memory leaks. - -*Note*: The [`process.stderr`][] and [`process.stdout`][] Writable streams are -never closed until the Node.js process exits, regardless of the specified -options. - -##### readable.read([size]) - - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `readable.read()` method pulls some data out of the internal buffer and -returns it. If no data available to be read, `null` is returned. By default, -the data will be returned as a `Buffer` object unless an encoding has been -specified using the `readable.setEncoding()` method or the stream is operating -in object mode. - -The optional `size` argument specifies a specific number of bytes to read. If -`size` bytes are not available to be read, `null` will be returned *unless* -the stream has ended, in which case all of the data remaining in the internal -buffer will be returned (*even if it exceeds `size` bytes*). - -If the `size` argument is not specified, all of the data contained in the -internal buffer will be returned. - -The `readable.read()` method should only be called on Readable streams operating -in paused mode. In flowing mode, `readable.read()` is called automatically until -the internal buffer is fully drained. - -```js -const readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log(`Received ${chunk.length} bytes of data.`); - } -}); -``` - -In general, it is recommended that developers avoid the use of the `'readable'` -event and the `readable.read()` method in favor of using either -`readable.pipe()` or the `'data'` event. - -A Readable stream in object mode will always return a single item from -a call to [`readable.read(size)`][stream-read], regardless of the value of the -`size` argument. 
- -*Note:* If the `readable.read()` method returns a chunk of data, a `'data'` -event will also be emitted. - -*Note*: Calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been emitted will return `null`. No runtime error will be raised. - -##### readable.resume() - - -* Return: `this` - -The `readable.resume()` method causes an explicitly paused Readable stream to -resume emitting [`'data'`][] events, switching the stream into flowing mode. - -The `readable.resume()` method can be used to fully consume the data from a -stream without actually processing any of that data as illustrated in the -following example: - -```js -getReadableStreamSomehow() - .resume() - .on('end', () => { - console.log('Reached the end, but did not read anything.'); - }); -``` - -##### readable.setEncoding(encoding) - - -* `encoding` {String} The encoding to use. -* Return: `this` - -The `readable.setEncoding()` method sets the default character encoding for -data read from the Readable stream. - -Setting an encoding causes the stream data -to be returned as string of the specified encoding rather than as `Buffer` -objects. For instance, calling `readable.setEncoding('utf8')` will cause the -output data will be interpreted as UTF-8 data, and passed as strings. Calling -`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal -string format. - -The Readable stream will properly handle multi-byte characters delivered through -the stream that would otherwise become improperly decoded if simply pulled from -the stream as `Buffer` objects. - -Encoding can be disabled by calling `readable.setEncoding(null)`. This approach -is useful when working with binary data or with large multi-byte strings spread -out over multiple chunks. - -```js -const readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -##### readable.unpipe([destination]) - - -* `destination` {stream.Writable} Optional specific stream to unpipe - -The `readable.unpipe()` method detaches a Writable stream previously attached -using the [`stream.pipe()`][] method. - -If the `destination` is not specified, then *all* pipes are detached. - -If the `destination` is specified, but no pipe is set up for it, then -the method does nothing. - -```js -const readable = getReadableStreamSomehow(); -const writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('Stop writing to file.txt'); - readable.unpipe(writable); - console.log('Manually close the file stream'); - writable.end(); -}, 1000); -``` - -##### readable.unshift(chunk) - - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -The `readable.unshift()` method pushes a chunk of data back into the internal -buffer. This is useful in certain situations where a stream is being consumed by -code that needs to "un-consume" some amount of data that it has optimistically -pulled out of the source, so that the data can be passed on to some other party. - -*Note*: The `stream.unshift(chunk)` method cannot be called after the -[`'end'`][] event has been emitted or a runtime error will be thrown. - -Developers using `stream.unshift()` often should consider switching to -use of a [Transform][] stream instead. 
See the [API for Stream Implementers][] -section for more information. - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - const decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - const remaining = split.join('\n\n'); - const buf = Buffer.from(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -*Note*: Unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `readable.unshift()` is called -during a read (i.e. from within a [`stream._read()`][stream-_read] -implementation on a custom stream). Following the call to `readable.unshift()` -with an immediate [`stream.push('')`][stream-push] will reset the reading state -appropriately, however it is best to simply avoid calling `readable.unshift()` -while in the process of performing a read. - -##### readable.wrap(stream) - - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire `stream` module API as it is currently defined. (See [Compatibility][] -for more information.) - -When using an older Node.js library that emits [`'data'`][] events and has a -[`stream.pause()`][stream-pause] method that is advisory only, the -`readable.wrap()` method can be used to create a [Readable][] stream that uses -the old stream as its data source. - -It will rarely be necessary to use `readable.wrap()` but the method has been -provided as a convenience for interacting with older Node.js applications and -libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Duplex and Transform Streams - -#### Class: stream.Duplex - - - - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -#### Class: stream.Transform - - - - -Transform streams are [Duplex][] streams where the output is in some way -related to the input. Like all [Duplex][] streams, Transform streams -implement both the [Readable][] and [Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - - -## API for Stream Implementers - - - -The `stream` module API has been designed to make it possible to easily -implement streams using JavaScript's prototypical inheritance model. 
- -First, a stream developer would declare a new JavaScript class that extends one -of the four basic stream classes (`stream.Writable`, `stream.Readable`, -`stream.Duplex`, or `stream.Transform`), making sure the call the appropriate -parent class constructor: - -```js -const Writable = require('stream').Writable; - -class MyWritable extends Writable { - constructor(options) { - super(options); - } -} -``` - -The new stream class must then implement one or more specific methods, depending -on the type of stream being created, as detailed in the chart below: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Use-case | Class | Method(s) to implement |
-| -------- | ----- | ---------------------- |
-| Reading only | [Readable](#stream_class_stream_readable) | [_read][stream-_read] |
-| Writing only | [Writable](#stream_class_stream_writable) | [_write][stream-_write], [_writev][stream-_writev] |
-| Reading and writing | [Duplex](#stream_class_stream_duplex) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform) | [_transform][stream-_transform], [_flush][stream-_flush] |
- -*Note*: The implementation code for a stream should *never* call the "public" -methods of a stream that are intended for use by consumers (as described in -the [API for Stream Consumers][] section). Doing so may lead to adverse -side effects in application code consuming the stream. - -### Simplified Construction - -For many simple cases, it is possible to construct a stream without relying on -inheritance. This can be accomplished by directly creating instances of the -`stream.Writable`, `stream.Readable`, `stream.Duplex` or `stream.Transform` -objects and passing appropriate methods as constructor options. - -For example: - -```js -const Writable = require('stream').Writable; - -const myWritable = new Writable({ - write(chunk, encoding, callback) { - // ... - } -}); -``` - -### Implementing a Writable Stream - -The `stream.Writable` class is extended to implement a [Writable][] stream. - -Custom Writable streams *must* call the `new stream.Writable([options])` -constructor and implement the `writable._write()` method. The -`writable._writev()` method *may* also be implemented. - -#### Constructor: new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Defaults to - `16384` (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Defaults to `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. When set, - it becomes possible to write JavaScript values other than string or - `Buffer` if supported by the stream implementation. Defaults to `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -For example: - -```js -const Writable = require('stream').Writable; - -class MyWritable extends Writable { - constructor(options) { - // Calls the stream.Writable() constructor - super(options); - } -} -``` - -Or, when using pre-ES6 style constructors: - -```js -const Writable = require('stream').Writable; -const util = require('util'); - -function MyWritable(options) { - if (!(this instanceof MyWritable)) - return new MyWritable(options); - Writable.call(this, options); -} -util.inherits(MyWritable, Writable); -``` - -Or, using the Simplified Constructor approach: - -```js -const Writable = require('stream').Writable; - -const myWritable = new Writable({ - write(chunk, encoding, callback) { - // ... - }, - writev(chunks, callback) { - // ... - } -}); -``` - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then `encoding` is the - character encoding of that string. If chunk is a `Buffer`, or if the - stream is operating in object mode, `encoding` may be ignored. -* `callback` {Function} Call this function (optionally with an error - argument) when processing is complete for the supplied chunk. - -All Writable stream implementations must provide a -[`writable._write()`][stream-_write] method to send data to the underlying -resource. - -*Note*: [Transform][] streams provide their own implementation of the -[`writable._write()`][stream-_write]. 
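As a rough sketch of the `_write()` contract described above (not part of the original example set), a Writable that simply stores every chunk in memory could look like the following; the `CollectWritable` name and its `received` property are invented for this illustration:

```js
const Writable = require('stream').Writable;

class CollectWritable extends Writable {
  constructor(options) {
    super(options);
    // Invented for this sketch: holds every chunk passed to _write().
    this.received = [];
  }

  _write(chunk, encoding, callback) {
    // `chunk` is a Buffer here because `decodeStrings` defaults to `true`.
    this.received.push(chunk);
    // Signal completion for this chunk; pass an Error instead to report
    // a failure (an 'error' event will then be emitted by the stream).
    callback();
  }
}

const myWritable = new CollectWritable();
myWritable.write('some data');
myWritable.end('more data', () => {
  console.log(`stored ${myWritable.received.length} chunk(s)`);
});
```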
- -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Writable -class methods only. - -The `callback` method must be called to signal either that the write completed -successfully or failed with an error. The first argument passed to the -`callback` must be the `Error` object if the call failed or `null` if the -write succeeded. - -It is important to note that all calls to `writable.write()` that occur between -the time `writable._write()` is called and the `callback` is called will cause -the written data to be buffered. Once the `callback` is invoked, the stream will -emit a `'drain'` event. If a stream implementation is capable of processing -multiple chunks of data at once, the `writable._writev()` method should be -implemented. - -If the `decodeStrings` property is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the character encoding of the string. This is to support -implementations that have an optimized handling for certain string -data encodings. If the `decodeStrings` property is explicitly set to `false`, -the `encoding` argument can be safely ignored, and `chunk` will always be a -`Buffer`. - -The `writable._write()` method is prefixed with an underscore because it is -internal to the class that defines it, and should never be called directly by -user programs. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} A callback function (optionally with an error - argument) to be invoked when processing is complete for the supplied chunks. - -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Writable -class methods only. - -The `writable._writev()` method may be implemented in addition to -`writable._write()` in stream implementations that are capable of processing -multiple chunks of data at once. If implemented, the method will be called with -all chunks of data currently buffered in the write queue. - -The `writable._writev()` method is prefixed with an underscore because it is -internal to the class that defines it, and should never be called directly by -user programs. - -#### Errors While Writing - -It is recommended that errors occurring during the processing of the -`writable._write()` and `writable._writev()` methods are reported by invoking -the callback and passing the error as the first argument. This will cause an -`'error'` event to be emitted by the Writable. Throwing an Error from within -`writable._write()` can result in expected and inconsistent behavior depending -on how the stream is being used. Using the callback ensures consistent and -predictable handling of errors. - -```js -const Writable = require('stream').Writable; - -const myWritable = new Writable({ - write(chunk, encoding, callback) { - if (chunk.toString().indexOf('a') >= 0) { - callback(new Error('chunk is invalid')); - } else { - callback(); - } - } -}); -``` - -#### An Example Writable Stream - -The following illustrates a rather simplistic (and somewhat pointless) custom -Writable stream implementation. 
While this specific Writable stream instance -is not of any real particular usefulness, the example illustrates each of the -required elements of a custom [Writable][] stream instance: - -```js -const Writable = require('stream').Writable; - -class MyWritable extends Writable { - constructor(options) { - super(options); - } - - _write(chunk, encoding, callback) { - if (chunk.toString().indexOf('a') >= 0) { - callback(new Error('chunk is invalid')); - } else { - callback(); - } - } -} -``` - -### Implementing a Readable Stream - -The `stream.Readable` class is extended to implement a [Readable][] stream. - -Custom Readable streams *must* call the `new stream.Readable([options])` -constructor and implement the `readable._read()` method. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Defaults to `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Defaults to `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Defaults to `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -For example: - -```js -const Readable = require('stream').Readable; - -class MyReadable extends Readable { - constructor(options) { - // Calls the stream.Readable(options) constructor - super(options); - } -} -``` - -Or, when using pre-ES6 style constructors: - -```js -const Readable = require('stream').Readable; -const util = require('util'); - -function MyReadable(options) { - if (!(this instanceof MyReadable)) - return new MyReadable(options); - Readable.call(this, options); -} -util.inherits(MyReadable, Readable); -``` - -Or, using the Simplified Constructor approach: - -```js -const Readable = require('stream').Readable; - -const myReadable = new Readable({ - read(size) { - // ... - } -}); -``` - -#### readable.\_read(size) - -* `size` {Number} Number of bytes to read asynchronously - -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Readable -class methods only. - -All Readable stream implementations must provide an implementation of the -`readable._read()` method to fetch data from the underlying resource. - -When `readable._read()` is called, if data is available from the resource, the -implementation should begin pushing that data into the read queue using the -[`this.push(dataChunk)`][stream-push] method. `_read()` should continue reading -from the resource and pushing data until `readable.push()` returns `false`. Only -when `_read()` is called again after it has stopped should it resume pushing -additional data onto the queue. - -*Note*: Once the `readable._read()` method has been called, it will not be -called again until the [`readable.push()`][stream-push] method is called. - -The `size` argument is advisory. For implementations where a "read" is a -single operation that returns data can use the `size` argument to determine how -much data to fetch. Other implementations may ignore this argument and simply -provide data whenever it becomes available. There is no need to "wait" until -`size` bytes are available before calling [`stream.push(chunk)`][stream-push]. 
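To make the push-until-`false` contract above concrete, here is a minimal sketch (not taken from the original document) that serves data from an in-memory array; the `ArrayReadable` name and its `_chunks` field are assumptions made only for the example:

```js
const Readable = require('stream').Readable;

class ArrayReadable extends Readable {
  constructor(chunks, options) {
    super(options);
    // Invented for this sketch: a fixed, in-memory data source.
    this._chunks = chunks.slice();
  }

  _read(size) {
    // Keep pushing until either the source is drained or push() returns
    // false (the internal buffer is full); _read() will be called again
    // when more data can be accepted.
    while (this._chunks.length > 0) {
      if (!this.push(this._chunks.shift()))
        return;
    }
    // Signal EOF once the source is exhausted.
    this.push(null);
  }
}

const r = new ArrayReadable(['alpha', 'beta', 'gamma']);
r.on('data', (chunk) => console.log(chunk.toString()));
```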
- -The `readable._read()` method is prefixed with an underscore because it is -internal to the class that defines it, and should never be called directly by -user programs. - -#### readable.push(chunk[, encoding]) - -* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue -* `encoding` {String} Encoding of String chunks. Must be a valid - Buffer encoding, such as `'utf8'` or `'ascii'` -* Returns {Boolean} `true` if additional chunks of data may continued to be - pushed; `false` otherwise. - -When `chunk` is a `Buffer` or `string`, the `chunk` of data will be added to the -internal queue for users of the stream to consume. Passing `chunk` as `null` -signals the end of the stream (EOF), after which no more data can be written. - -When the Readable is operating in paused mode, the data added with -`readable.push()` can be read out by calling the -[`readable.read()`][stream-read] method when the [`'readable'`][] event is -emitted. - -When the Readable is operating in flowing mode, the data added with -`readable.push()` will be delivered by emitting a `'data'` event. - -The `readable.push()` method is designed to be as flexible as possible. For -example, when wrapping a lower-level source that provides some form of -pause/resume mechanism, and a data callback, the low-level source can be wrapped -by the custom Readable instance as illustrated in the following example: - -```js -// source is an object with readStop() and readStart() methods, -// and an `ondata` member that gets called when it has data, and -// an `onend` member that gets called when the data is over. - -class SourceWrapper extends Readable { - constructor(options) { - super(options); - - this._source = getLowlevelSourceObject(); - - // Every time there's data, push it into the internal buffer. - this._source.ondata = (chunk) => { - // if push() returns false, then stop reading from source - if (!this.push(chunk)) - this._source.readStop(); - }; - - // When the source ends, push the EOF-signaling `null` chunk - this._source.onend = () => { - this.push(null); - }; - } - // _read will be called when the stream wants to pull more data in - // the advisory size argument is ignored in this case. - _read(size) { - this._source.readStart(); - } -} -``` -*Note*: The `readable.push()` method is intended be called only by Readable -Implementers, and only from within the `readable._read()` method. - -#### Errors While Reading - -It is recommended that errors occurring during the processing of the -`readable._read()` method are emitted using the `'error'` event rather than -being thrown. Throwing an Error from within `readable._read()` can result in -expected and inconsistent behavior depending on whether the stream is operating -in flowing or paused mode. Using the `'error'` event ensures consistent and -predictable handling of errors. - -```js -const Readable = require('stream').Readable; - -const myReadable = new Readable({ - read(size) { - if (checkSomeErrorCondition()) { - process.nextTick(() => this.emit('error', err)); - return; - } - // do some work - } -}); -``` - -#### An Example Counting Stream - - - -The following is a basic example of a Readable stream that emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. 
- -```js -const Readable = require('stream').Readable; - -class Counter extends Readable { - constructor(opt) { - super(opt); - this._max = 1000000; - this._index = 1; - } - - _read() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = Buffer.from(str, 'ascii'); - this.push(buf); - } - } -} -``` - -### Implementing a Duplex Stream - -A [Duplex][] stream is one that implements both [Readable][] and [Writable][], -such as a TCP socket connection. - -Because Javascript does not have support for multiple inheritance, the -`stream.Duplex` class is extended to implement a [Duplex][] stream (as opposed -to extending the `stream.Readable` *and* `stream.Writable` classes). - -*Note*: The `stream.Duplex` class prototypically inherits from `stream.Readable` -and parasitically from `stream.Writable`. - -Custom Duplex streams *must* call the `new stream.Duplex([options])` -constructor and implement *both* the `readable._read()` and -`writable._write()` methods. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Defaults to `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -For example: - -```js -const Duplex = require('stream').Duplex; - -class MyDuplex extends Duplex { - constructor(options) { - super(options); - } -} -``` - -Or, when using pre-ES6 style constructors: - -```js -const Duplex = require('stream').Duplex; -const util = require('util'); - -function MyDuplex(options) { - if (!(this instanceof MyDuplex)) - return new MyDuplex(options); - Duplex.call(this, options); -} -util.inherits(MyDuplex, Duplex); -``` - -Or, using the Simplified Constructor approach: - -```js -const Duplex = require('stream').Duplex; - -const myDuplex = new Duplex({ - read(size) { - // ... - }, - write(chunk, encoding, callback) { - // ... - } -}); -``` - -#### An Example Duplex Stream - -The following illustrates a simple example of a Duplex stream that wraps a -hypothetical lower-level source object to which data can be written, and -from which data can be read, albeit using an API that is not compatible with -Node.js streams. -The following illustrates a simple example of a Duplex stream that buffers -incoming written data via the [Writable][] interface that is read back out -via the [Readable][] interface. - -```js -const Duplex = require('stream').Duplex; -const kSource = Symbol('source'); - -class MyDuplex extends Duplex { - constructor(source, options) { - super(options); - this[kSource] = source; - } - - _write(chunk, encoding, callback) { - // The underlying source only deals with strings - if (Buffer.isBuffer(chunk)) - chunk = chunk.toString(encoding); - this[kSource].writeSomeData(chunk, encoding); - callback(); - } - - _read(size) { - this[kSource].fetchSomeData(size, (data, encoding) => { - this.push(Buffer.from(data, encoding)); - }); - } -} -``` - -The most important aspect of a Duplex stream is that the Readable and Writable -sides operate independently of one another despite co-existing within a single -object instance. 
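One way to see that independence (a sketch added here for illustration, with the `ByteCounter` name and its fields invented) is a Duplex whose Writable side merely counts incoming bytes while its Readable side produces a short status report that is unrelated to the written data itself:

```js
const Duplex = require('stream').Duplex;

class ByteCounter extends Duplex {
  constructor(options) {
    super(options);
    this._bytesWritten = 0;
  }

  _write(chunk, encoding, callback) {
    // The Writable side only tallies bytes; it never forwards the data.
    this._bytesWritten += chunk.length;
    callback();
  }

  _read(size) {
    // The Readable side emits a single status line, then ends.
    this.push(`bytes written so far: ${this._bytesWritten}\n`);
    this.push(null);
  }
}

const counter = new ByteCounter();
counter.write('hello');
counter.on('data', (chunk) => process.stdout.write(chunk));
```

Beyond the shared counter, what is written has no effect on what can be read: each side simply follows its own half of the stream API.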
- -#### Object Mode Duplex Streams - -For Duplex streams, `objectMode` can be set exclusively for either the Readable -or Writable side using the `readableObjectMode` and `writableObjectMode` options -respectively. - -In the following example, for instance, a new Transform stream (which is a -type of [Duplex][] stream) is created that has an object mode Writable side -that accepts JavaScript numbers that are converted to hexidecimal strings on -the Readable side. - -```js -const Transform = require('stream').Transform; - -// All Transform streams are also Duplex Streams -const myTransform = new Transform({ - writableObjectMode: true, - - transform(chunk, encoding, callback) { - // Coerce the chunk to a number if necessary - chunk |= 0; - - // Transform the chunk into something else. - const data = chunk.toString(16); - - // Push the data onto the readable queue. - callback(null, '0'.repeat(data.length % 2) + data); - } -}); - -myTransform.setEncoding('ascii'); -myTransform.on('data', (chunk) => console.log(chunk)); - -myTransform.write(1); - // Prints: 01 -myTransform.write(10); - // Prints: 0a -myTransform.write(100); - // Prints: 64 -``` - -### Implementing a Transform Stream - -A [Transform][] stream is a [Duplex][] stream where the output is computed -in some way from the input. Examples include [zlib][] streams or [crypto][] -streams that compress, encrypt, or decrypt data. - -*Note*: There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A `zlib` stream will produce output -that is either much smaller or much larger than its input. - -The `stream.Transform` class is extended to implement a [Transform][] stream. - -The `stream.Transform` class prototypically inherits from `stream.Duplex` and -implements its own versions of the `writable._write()` and `readable._read()` -methods. Custom Transform implementations *must* implement the -[`transform._transform()`][stream-_transform] method and *may* also implement -the [`transform._flush()`][stream-_flush] method. - -*Note*: Care must be taken when using Transform streams in that data written -to the stream can cause the Writable side of the stream to become paused if -the output on the Readable side is not consumed. - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -For example: - -```js -const Transform = require('stream').Transform; - -class MyTransform extends Transform { - constructor(options) { - super(options); - } -} -``` - -Or, when using pre-ES6 style constructors: - -```js -const Transform = require('stream').Transform; -const util = require('util'); - -function MyTransform(options) { - if (!(this instanceof MyTransform)) - return new MyTransform(options); - Transform.call(this, options); -} -util.inherits(MyTransform, Transform); -``` - -Or, using the Simplified Constructor approach: - -```js -const Transform = require('stream').Transform; - -const myTransform = new Transform({ - transform(chunk, encoding, callback) { - // ... 
- } -}); -``` - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable` -and `stream.Readable` classes, respectively. The `'finish'` event is emitted -after [`stream.end()`][stream-end] is called and all chunks have been processed -by [`stream._transform()`][stream-_transform]. The `'end'` event is emitted -after all data has been output, which occurs after the callback in -[`transform._flush()`][stream-_flush] has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} A callback function (optionally with an error - argument) to be called when remaining data has been flushed. - -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Readable -class methods only. - -In some cases, a transform operation may need to emit an additional bit of -data at the end of the stream. For example, a `zlib` compression stream will -store an amount of internal state used to optimally compress the output. When -the stream ends, however, that additional data needs to be flushed so that the -compressed data will be complete. - -Custom [Transform][] implementations *may* implement the `transform._flush()` -method. This will be called when there is no more written data to be consumed, -but before the [`'end'`][] event is emitted signaling the end of the -[Readable][] stream. - -Within the `transform._flush()` implementation, the `readable.push()` method -may be called zero or more times, as appropriate. The `callback` function must -be called when the flush operation is complete. - -The `transform._flush()` method is prefixed with an underscore because it is -internal to the class that defines it, and should never be called directly by -user programs. - -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} A callback function (optionally with an error - argument and data) to be called after the supplied `chunk` has been - processed. - -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Readable -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. The `transform._transform()` -implementation handles the bytes being written, computes an output, then passes -that output off to the readable portion using the `readable.push()` method. - -The `transform.push()` method may be called zero or more times to generate -output from a single input chunk, depending on how much is to be output -as a result of the chunk. - -It is possible that no output is generated from any given chunk of input data. - -The `callback` function must be called only when the current chunk is completely -consumed. The first argument passed to the `callback` must be an `Error` object -if an error occurred while processing the input or `null` otherwise. If a second -argument is passed to the `callback`, it will be forwarded on to the -`readable.push()` method. 
In other words the following are equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -The `transform._transform()` method is prefixed with an underscore because it -is internal to the class that defines it, and should never be called directly by -user programs. - -#### Class: stream.PassThrough - -The `stream.PassThrough` class is a trivial implementation of a [Transform][] -stream that simply passes the input bytes across to the output. Its purpose is -primarily for examples and testing, but there are some use cases where -`stream.PassThrough` is useful as a building block for novel sorts of streams. - -## Additional Notes - - - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for calls the [`stream.read()`][stream-read] method, - [`'data'`][] events would begin emitting immediately. Applications that - would need to perform some amount of work to decide how to handle data - were required to store read data into buffers so the data would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that it was still necessary to be prepared to receive - [`'data'`][] events *even when the stream was in a paused state*. - -In Node.js v0.10, the [Readable][] class was added. For backwards compatibility -with older Node.js programs, Readable streams switch into "flowing mode" when a -[`'data'`][] event handler is added, or when the -[`stream.resume()`][stream-resume] method is called. The effect is that, even -when not using the new [`stream.read()`][stream-read] method and -[`'readable'`][] event, it is no longer necessary to worry about losing -[`'data'`][] chunks. - -While most applications will continue to function normally, this introduces an -edge case in the following conditions: - -* No [`'data'`][] event listener is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('The message was received but was not processed.\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, the socket remains -paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to begin the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('The message was received but was not processed.\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`readable.wrap()`][`stream.wrap()`] method. - - -### `readable.read(0)` - -There are some cases where it is necessary to trigger a refresh of the -underlying readable stream mechanisms, without actually consuming any -data. 
In such cases, it is possible to call `readable.read(0)`, which will -always return `null`. - -If the internal read buffer is below the `highWaterMark`, and the -stream is not currently reading, then calling `stream.read(0)` will trigger -a low-level [`stream._read()`][stream-_read] call. - -While most applications will almost never need to do this, there are -situations within Node.js where this is done, particularly in the -Readable stream class internals. - -### `readable.push('')` - -Use of `readable.push('')` is not recommended. - -Pushing a zero-byte string or `Buffer` to a stream that is not in object mode -has an interesting side effect. Because it *is* a call to -[`readable.push()`][stream-push], the call will end the reading process. -However, because the argument is an empty string, no data is added to the -readable buffer so there is nothing for a user to consume. - -[`'data'`]: #stream_event_data -[`'drain'`]: #stream_event_drain -[`'end'`]: #stream_event_end -[`'finish'`]: #stream_event_finish -[`'readable'`]: #stream_event_readable -[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.3.1/api/buffer.html#buffer_buf_tostring_encoding_start_end -[`EventEmitter`]: https://nodejs.org/docs/v6.3.1/api/events.html#events_class_eventemitter -[`process.stderr`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stderr -[`process.stdin`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdin -[`process.stdout`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdout -[`stream.cork()`]: #stream_writable_cork -[`stream.pipe()`]: #stream_readable_pipe_destination_options -[`stream.uncork()`]: #stream_writable_uncork -[`stream.unpipe()`]: #stream_readable_unpipe_destination -[`stream.wrap()`]: #stream_readable_wrap_stream -[`tls.CryptoStream`]: https://nodejs.org/docs/v6.3.1/api/tls.html#tls_class_cryptostream -[API for Stream Consumers]: #stream_api_for_stream_consumers -[API for Stream Implementers]: #stream_api_for_stream_implementers -[child process stdin]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdin -[child process stdout and stderr]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdout -[Compatibility]: #stream_compatibility_with_older_node_js_versions -[crypto]: crypto.html -[Duplex]: #stream_class_stream_duplex -[fs read streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_readstream -[fs write streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_writestream -[`fs.createReadStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createreadstream_path_options -[`fs.createWriteStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createwritestream_path_options -[`net.Socket`]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket -[`zlib.createDeflate()`]: https://nodejs.org/docs/v6.3.1/api/zlib.html#zlib_zlib_createdeflate_options -[HTTP requests, on the client]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_clientrequest -[HTTP responses, on the server]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_serverresponse -[http-incoming-message]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_incomingmessage -[Object mode]: #stream_object_mode -[Readable]: #stream_class_stream_readable -[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2 -[stream-_flush]: #stream_transform_flush_callback -[stream-_read]: #stream_readable_read_size_1 -[stream-_transform]: 
#stream_transform_transform_chunk_encoding_callback -[stream-_write]: #stream_writable_write_chunk_encoding_callback_1 -[stream-_writev]: #stream_writable_writev_chunks_callback -[stream-end]: #stream_writable_end_chunk_encoding_callback -[stream-pause]: #stream_readable_pause -[stream-push]: #stream_readable_push_chunk_encoding -[stream-read]: #stream_readable_read_size -[stream-resume]: #stream_readable_resume -[stream-write]: #stream_writable_write_chunk_encoding_callback -[TCP sockets]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket -[Transform]: #stream_class_stream_transform -[Writable]: #stream_class_stream_writable -[zlib]: zlib.html diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js index 208cc65f1cca99..3a7d42d62b86a3 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js @@ -10,6 +10,10 @@ var processNextTick = require('process-nextick-args'); var isArray = require('isarray'); /**/ +/**/ +var Duplex; +/**/ + Readable.ReadableState = ReadableState; /**/ @@ -57,6 +61,8 @@ var StringDecoder; util.inherits(Readable, Stream); function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. if (typeof emitter.prependListener === 'function') { return emitter.prependListener(event, fn); } else { @@ -68,7 +74,6 @@ function prependListener(emitter, event, fn) { } } -var Duplex; function ReadableState(options, stream) { Duplex = Duplex || require('./_stream_duplex'); @@ -138,7 +143,6 @@ function ReadableState(options, stream) { } } -var Duplex; function Readable(options) { Duplex = Duplex || require('./_stream_duplex'); @@ -461,7 +465,7 @@ function maybeReadMore_(stream, state) { // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); + this.emit('error', new Error('_read() is not implemented')); }; Readable.prototype.pipe = function (dest, pipeOpts) { @@ -639,16 +643,16 @@ Readable.prototype.unpipe = function (dest) { state.pipesCount = 0; state.flowing = false; - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this); }return this; } // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; + var index = indexOf(state.pipes, dest); + if (index === -1) return this; - state.pipes.splice(i, 1); + state.pipes.splice(index, 1); state.pipesCount -= 1; if (state.pipesCount === 1) state.pipes = state.pipes[0]; diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js index dbc996ede62363..cd2583207f5b20 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js @@ -94,7 +94,6 @@ function Transform(options) { this._transformState = new TransformState(this); - // when the writable side finishes, then flush out anything remaining. var stream = this; // start out asking for a readable event once data is transformed. 
@@ -111,9 +110,10 @@ function Transform(options) { if (typeof options.flush === 'function') this._flush = options.flush; } + // When the writable side finishes, then flush out anything remaining. this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); + if (typeof this._flush === 'function') this._flush(function (er, data) { + done(stream, er, data); });else done(stream); }); } @@ -134,7 +134,7 @@ Transform.prototype.push = function (chunk, encoding) { // an error, then that'll put the hurt on the whole operation. If you // never call cb(), then you'll never get another chunk. Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('Not implemented'); + throw new Error('_transform() is not implemented'); }; Transform.prototype._write = function (chunk, encoding, cb) { @@ -164,9 +164,11 @@ Transform.prototype._read = function (n) { } }; -function done(stream, er) { +function done(stream, er, data) { if (er) return stream.emit('error', er); + if (data !== null && data !== undefined) stream.push(data); + // if there's nothing in the write buffer, then that means // that nothing more will ever be provided var ws = stream._writableState; diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js index ed5efcbd203208..4d9c62ba62ff2b 100644 --- a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js +++ b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js @@ -14,6 +14,10 @@ var processNextTick = require('process-nextick-args'); var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick; /**/ +/**/ +var Duplex; +/**/ + Writable.WritableState = WritableState; /**/ @@ -54,7 +58,6 @@ function WriteReq(chunk, encoding, cb) { this.next = null; } -var Duplex; function WritableState(options, stream) { Duplex = Duplex || require('./_stream_duplex'); @@ -76,6 +79,7 @@ function WritableState(options, stream) { // cast to ints. this.highWaterMark = ~ ~this.highWaterMark; + // drain event flag. this.needDrain = false; // at the start of calling end() this.ending = false; @@ -150,7 +154,7 @@ function WritableState(options, stream) { this.corkedRequestsFree = new CorkedRequest(this); } -WritableState.prototype.getBuffer = function writableStateGetBuffer() { +WritableState.prototype.getBuffer = function getBuffer() { var current = this.bufferedRequest; var out = []; while (current) { @@ -170,13 +174,37 @@ WritableState.prototype.getBuffer = function writableStateGetBuffer() { } catch (_) {} })(); -var Duplex; +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + function Writable(options) { Duplex = Duplex || require('./_stream_duplex'); - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. 
- if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } this._writableState = new WritableState(options, this); @@ -436,7 +464,7 @@ function clearBuffer(stream, state) { } Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); + cb(new Error('_write() is not implemented')); }; Writable.prototype._writev = null; diff --git a/deps/npm/node_modules/readable-stream/package.json b/deps/npm/node_modules/readable-stream/package.json index 6cdacd2f62da22..bc5214bf185c79 100644 --- a/deps/npm/node_modules/readable-stream/package.json +++ b/deps/npm/node_modules/readable-stream/package.json @@ -2,57 +2,61 @@ "_args": [ [ { - "raw": "readable-stream@2.1.5", + "raw": "readable-stream@2.2.2", "scope": null, "escapedName": "readable-stream", "name": "readable-stream", - "rawSpec": "2.1.5", - "spec": "2.1.5", + "rawSpec": "2.2.2", + "spec": "2.2.2", "type": "version" }, - "/Users/zkat/Documents/code/npm" + "/Users/rebecca/code/npm-latest" ] ], - "_from": "readable-stream@2.1.5", - "_id": "readable-stream@2.1.5", + "_from": "readable-stream@2.2.2", + "_id": "readable-stream@2.2.2", "_inCache": true, "_location": "/readable-stream", - "_nodeVersion": "5.12.0", + "_nodeVersion": "7.1.0", "_npmOperationalInternal": { - "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.1.5.tgz_1471463532993_0.15824943827465177" + "host": "packages-18-east.internal.npmjs.com", + "tmp": "tmp/readable-stream-2.2.2.tgz_1479128709230_0.5291099038440734" }, "_npmUser": { "name": "cwmma", "email": "calvin.metcalf@gmail.com" }, - "_npmVersion": "3.8.6", + "_npmVersion": "3.10.7", "_phantomChildren": {}, "_requested": { - "raw": "readable-stream@2.1.5", + "raw": "readable-stream@2.2.2", "scope": null, "escapedName": "readable-stream", "name": "readable-stream", - "rawSpec": "2.1.5", - "spec": "2.1.5", + "rawSpec": "2.2.2", + "spec": "2.2.2", "type": "version" }, "_requiredBy": [ "#USER", "/", "/fs-write-stream-atomic", + "/mississippi/duplexify", + "/mississippi/flush-write-stream", + "/mississippi/from2", "/node-gyp/npmlog/are-we-there-yet", "/npm-registry-client/npmlog/are-we-there-yet", "/npmlog/are-we-there-yet", "/sha", "/tap", + "/tap/tap-mocha-reporter", "/tap/tap-parser" ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.5.tgz", - "_shasum": "66fa8b720e1438b364681f2ad1a63c618448c9d0", + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz", + "_shasum": "a9e6fec3c7dda85f8bb1b3ba7028604556fc825e", "_shrinkwrap": null, - "_spec": "readable-stream@2.1.5", - "_where": "/Users/zkat/Documents/code/npm", + "_spec": "readable-stream@2.2.2", + "_where": "/Users/rebecca/code/npm-latest", "browser": { "util": false }, @@ -72,6 +76,7 @@ "devDependencies": { "assert": "~1.4.0", "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", "nyc": "^6.4.0", "tap": "~0.7.1", "tape": "~4.5.1", @@ -79,10 +84,10 @@ }, "directories": {}, "dist": { - "shasum": 
"66fa8b720e1438b364681f2ad1a63c618448c9d0", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.5.tgz" + "shasum": "a9e6fec3c7dda85f8bb1b3ba7028604556fc825e", + "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.2.2.tgz" }, - "gitHead": "758c8b3845af855fde736b6a7f58a15fba00d1e7", + "gitHead": "f239454e183d2032c0eb7d79a1c08f674fdd8db4", "homepage": "https://github.com/nodejs/readable-stream#readme", "keywords": [ "readable", @@ -129,5 +134,5 @@ "test": "tap test/parallel/*.js test/ours/*.js", "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" }, - "version": "2.1.5" + "version": "2.2.2" } diff --git a/deps/npm/node_modules/request/.npmignore b/deps/npm/node_modules/request/.npmignore deleted file mode 100644 index 67fe11cc0a77bb..00000000000000 --- a/deps/npm/node_modules/request/.npmignore +++ /dev/null @@ -1,6 +0,0 @@ -coverage -tests -node_modules -examples -release.sh -disabled.appveyor.yml diff --git a/deps/npm/node_modules/request/.travis.yml b/deps/npm/node_modules/request/.travis.yml deleted file mode 100644 index 9be8247c7b52bc..00000000000000 --- a/deps/npm/node_modules/request/.travis.yml +++ /dev/null @@ -1,21 +0,0 @@ - -language: node_js - -node_js: - - node - - 6 - - 4 - - 0.12 - -after_script: - - npm run test-cov - - codecov - - cat ./coverage/lcov.info | coveralls - -webhooks: - urls: https://webhooks.gitter.im/e/237280ed4796c19cc626 - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: false # default: false - -sudo: false diff --git a/deps/npm/node_modules/request/CHANGELOG.md b/deps/npm/node_modules/request/CHANGELOG.md index 042c6e526f3a0d..7a9b2abf9e4d1e 100644 --- a/deps/npm/node_modules/request/CHANGELOG.md +++ b/deps/npm/node_modules/request/CHANGELOG.md @@ -1,5 +1,33 @@ ## Change Log +### v2.79.0 (2016/11/18) +- [#2368](https://github.com/request/request/pull/2368) Fix typeof check in test-pool.js (@forivall) +- [#2394](https://github.com/request/request/pull/2394) Use `files` in package.json (@SimenB) +- [#2463](https://github.com/request/request/pull/2463) AWS support for session tokens for temporary credentials (@simov) +- [#2467](https://github.com/request/request/pull/2467) Migrate to uuid (@simov, @antialias) +- [#2459](https://github.com/request/request/pull/2459) Update taper to version 0.5.0 🚀 (@greenkeeperio-bot) +- [#2448](https://github.com/request/request/pull/2448) Make other connect timeout test more reliable too (@mscdex) + +### v2.78.0 (2016/11/03) +- [#2447](https://github.com/request/request/pull/2447) Always set request timeout on keep-alive connections (@mscdex) + +### v2.77.0 (2016/11/03) +- [#2439](https://github.com/request/request/pull/2439) Fix socket 'connect' listener handling (@mscdex) +- [#2442](https://github.com/request/request/pull/2442) 👻😱 Node.js 0.10 is unmaintained 😱👻 (@greenkeeperio-bot) +- [#2435](https://github.com/request/request/pull/2435) Add followOriginalHttpMethod to redirect to original HTTP method (@kirrg001) +- [#2414](https://github.com/request/request/pull/2414) Improve test-timeout reliability (@mscdex) + +### v2.76.0 (2016/10/25) +- [#2424](https://github.com/request/request/pull/2424) Handle buffers directly instead of using "bl" (@zertosh) +- [#2415](https://github.com/request/request/pull/2415) Re-enable timeout tests on Travis + other fixes (@mscdex) +- 
[#2431](https://github.com/request/request/pull/2431) Improve timeouts accuracy and node v6.8.0+ compatibility (@mscdex, @greenkeeperio-bot) +- [#2428](https://github.com/request/request/pull/2428) Update qs to version 6.3.0 🚀 (@greenkeeperio-bot) +- [#2420](https://github.com/request/request/pull/2420) change .on to .once, remove possible memory leaks (@duereg) +- [#2426](https://github.com/request/request/pull/2426) Remove "isFunction" helper in favor of "typeof" check (@zertosh) +- [#2425](https://github.com/request/request/pull/2425) Simplify "defer" helper creation (@zertosh) +- [#2402](https://github.com/request/request/pull/2402) form-data@2.1.1 breaks build 🚨 (@greenkeeperio-bot) +- [#2393](https://github.com/request/request/pull/2393) Update form-data to version 2.1.0 🚀 (@greenkeeperio-bot) + ### v2.75.0 (2016/09/17) - [#2381](https://github.com/request/request/pull/2381) Drop support for Node 0.10 (@simov) - [#2377](https://github.com/request/request/pull/2377) Update form-data to version 2.0.0 🚀 (@greenkeeperio-bot) @@ -476,7 +504,6 @@ ### v2.29.0 (2013/12/06) - [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris) - ### v2.28.0 (2013/12/04) - [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort) - [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit) @@ -493,15 +520,9 @@ - [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar) - [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm) - [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl) - -### v2.27.0 (2013/08/15) - [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo) - -### v2.26.0 (2013/08/07) - [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander) - [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker) - -### v2.25.0 (2013/07/23) - [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath) - [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK) - [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko) @@ -574,7 +595,7 @@ - [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs) - [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1) - [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs) -- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas) +- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas, @vpulim) - [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike) - [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. 
(@bcherry) - [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek) @@ -592,10 +613,10 @@ - [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn) - [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax) - [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek) -- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso) +- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso, @vpulim) - [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom) - [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup) -- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@milewise) +- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@vpulim) - [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs) - [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits (@isaacs) - [#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs) diff --git a/deps/npm/node_modules/request/CONTRIBUTING.md b/deps/npm/node_modules/request/CONTRIBUTING.md deleted file mode 100644 index 8aa6999acd820e..00000000000000 --- a/deps/npm/node_modules/request/CONTRIBUTING.md +++ /dev/null @@ -1,81 +0,0 @@ - -# Contributing to Request - -:+1::tada: First off, thanks for taking the time to contribute! :tada::+1: - -The following is a set of guidelines for contributing to Request and its packages, which are hosted in the [Request Organization](https://github.com/request) on GitHub. -These are just guidelines, not rules, use your best judgment and feel free to propose changes to this document in a pull request. - - -## Submitting an Issue - -1. Provide a small self **sufficient** code example to **reproduce** the issue. -2. Run your test code using [request-debug](https://github.com/request/request-debug) and copy/paste the results inside the issue. -3. You should **always** use fenced code blocks when submitting code examples or any other formatted output: -

-  ```js
-  put your javascript code here
-  ```
-
-  ```
-  put any other formatted output here,
-  like for example the one returned from using request-debug
-  ```
-  
- -If the problem cannot be reliably reproduced, the issue will be marked as `Not enough info (see CONTRIBUTING.md)`. - -If the problem is not related to request the issue will be marked as `Help (please use Stackoverflow)`. - - -## Submitting a Pull Request - -1. In almost all of the cases your PR **needs tests**. Make sure you have any. -2. Run `npm test` locally. Fix any errors before pushing to GitHub. -3. After submitting the PR a build will be triggered on TravisCI. Wait for it to ends and make sure all jobs are passing. - - ------------------------------------------ - - -## Becoming a Contributor - -Individuals making significant and valuable contributions are given -commit-access to the project to contribute as they see fit. This project is -more like an open wiki than a standard guarded open source project. - - -## Rules - -There are a few basic ground-rules for contributors: - -1. **No `--force` pushes** or modifying the Git history in any way. -1. **Non-master branches** ought to be used for ongoing work. -1. **Any** change should be added through Pull Request. -1. **External API changes and significant modifications** ought to be subject - to an **internal pull-request** to solicit feedback from other contributors. -1. Internal pull-requests to solicit feedback are *encouraged* for any other - non-trivial contribution but left to the discretion of the contributor. -1. For significant changes wait a full 24 hours before merging so that active - contributors who are distributed throughout the world have a chance to weigh - in. -1. Contributors should attempt to adhere to the prevailing code-style. -1. Run `npm test` locally before submitting your PR, to catch any easy to miss - style & testing issues. To diagnose test failures, there are two ways to - run a single test file: - - `node_modules/.bin/taper tests/test-file.js` - run using the default - [`taper`](https://github.com/nylen/taper) test reporter. - - `node tests/test-file.js` - view the raw - [tap](https://testanything.org/) output. - - -## Releases - -Declaring formal releases remains the prerogative of the project maintainer. - - -## Changes to this arrangement - -This is an experiment and feedback is welcome! This document may also be -subject to pull-requests or changes by contributors where you believe you have -something valuable to add or change. diff --git a/deps/npm/node_modules/request/README.md b/deps/npm/node_modules/request/README.md index 6eaaa0547ffc43..2ddfe72c5307dd 100644 --- a/deps/npm/node_modules/request/README.md +++ b/deps/npm/node_modules/request/README.md @@ -755,13 +755,14 @@ The first argument can be either a `url` or an `options` object. The only requir - `auth` - A hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above. - `oauth` - Options for OAuth HMAC-SHA1 signing. See documentation above. - `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example). -- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`. Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. **Note:** you need to `npm install aws4` first. 
+- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`, and optionally `session` (note that this only works for services that require session as part of the canonical string). Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. **Note:** you need to `npm install aws4` first. - `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options. --- - `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise. - `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`) +- `followOriginalHttpMethod` - by default we redirect to HTTP method GET. you can enable this property to redirect to the original HTTP method (default: `false`) - `maxRedirects` - the maximum number of redirects to follow (default: `10`) - `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if true, referer header set in the initial request is preserved during redirect chain. diff --git a/deps/npm/node_modules/request/codecov.yml b/deps/npm/node_modules/request/codecov.yml deleted file mode 100644 index acd3f33ceb5d2b..00000000000000 --- a/deps/npm/node_modules/request/codecov.yml +++ /dev/null @@ -1,2 +0,0 @@ - -comment: false diff --git a/deps/npm/node_modules/request/index.js b/deps/npm/node_modules/request/index.js index 911a90dbb5a5cb..9ec65ea268123b 100755 --- a/deps/npm/node_modules/request/index.js +++ b/deps/npm/node_modules/request/index.js @@ -18,8 +18,7 @@ var extend = require('extend') , cookies = require('./lib/cookies') , helpers = require('./lib/helpers') -var isFunction = helpers.isFunction - , paramsHaveRequestBody = helpers.paramsHaveRequestBody +var paramsHaveRequestBody = helpers.paramsHaveRequestBody // organize params for patch, post, put, head, del @@ -95,7 +94,7 @@ function wrapRequestMethod (method, options, requester, verb) { target.method = verb.toUpperCase() } - if (isFunction(requester)) { + if (typeof requester === 'function') { method = requester } diff --git a/deps/npm/node_modules/request/lib/auth.js b/deps/npm/node_modules/request/lib/auth.js index 1cb695216f6b16..559ca57be9d3ad 100644 --- a/deps/npm/node_modules/request/lib/auth.js +++ b/deps/npm/node_modules/request/lib/auth.js @@ -1,7 +1,7 @@ 'use strict' var caseless = require('caseless') - , uuid = require('node-uuid') + , uuid = require('uuid') , helpers = require('./helpers') var md5 = helpers.md5 diff --git a/deps/npm/node_modules/request/lib/helpers.js b/deps/npm/node_modules/request/lib/helpers.js index 356ff748e2f941..f9d727e382ad0b 100644 --- a/deps/npm/node_modules/request/lib/helpers.js +++ b/deps/npm/node_modules/request/lib/helpers.js @@ -3,17 +3,9 @@ var jsonSafeStringify = require('json-stringify-safe') , crypto = require('crypto') -function deferMethod() { - if (typeof setImmediate === 'undefined') { - return process.nextTick - } - - return setImmediate -} - -function 
isFunction(value) { - return typeof value === 'function' -} +var defer = typeof setImmediate === 'undefined' + ? process.nextTick + : setImmediate function paramsHaveRequestBody(params) { return ( @@ -63,7 +55,6 @@ function version () { } } -exports.isFunction = isFunction exports.paramsHaveRequestBody = paramsHaveRequestBody exports.safeStringify = safeStringify exports.md5 = md5 @@ -71,4 +62,4 @@ exports.isReadStream = isReadStream exports.toBase64 = toBase64 exports.copy = copy exports.version = version -exports.defer = deferMethod() +exports.defer = defer diff --git a/deps/npm/node_modules/request/lib/multipart.js b/deps/npm/node_modules/request/lib/multipart.js index c1281726187b6a..3b605bd47b89a4 100644 --- a/deps/npm/node_modules/request/lib/multipart.js +++ b/deps/npm/node_modules/request/lib/multipart.js @@ -1,6 +1,6 @@ 'use strict' -var uuid = require('node-uuid') +var uuid = require('uuid') , CombinedStream = require('combined-stream') , isstream = require('isstream') diff --git a/deps/npm/node_modules/request/lib/oauth.js b/deps/npm/node_modules/request/lib/oauth.js index c24209b897a857..56b39b0f5100de 100644 --- a/deps/npm/node_modules/request/lib/oauth.js +++ b/deps/npm/node_modules/request/lib/oauth.js @@ -3,7 +3,7 @@ var url = require('url') , qs = require('qs') , caseless = require('caseless') - , uuid = require('node-uuid') + , uuid = require('uuid') , oauth = require('oauth-sign') , crypto = require('crypto') diff --git a/deps/npm/node_modules/request/lib/redirect.js b/deps/npm/node_modules/request/lib/redirect.js index 040dfe0e03d335..f8604491f3e7a4 100644 --- a/deps/npm/node_modules/request/lib/redirect.js +++ b/deps/npm/node_modules/request/lib/redirect.js @@ -8,6 +8,7 @@ function Redirect (request) { this.followRedirect = true this.followRedirects = true this.followAllRedirects = false + this.followOriginalHttpMethod = false this.allowRedirect = function () {return true} this.maxRedirects = 10 this.redirects = [] @@ -36,6 +37,9 @@ Redirect.prototype.onRequest = function (options) { if (options.removeRefererHeader !== undefined) { self.removeRefererHeader = options.removeRefererHeader } + if (options.followOriginalHttpMethod !== undefined) { + self.followOriginalHttpMethod = options.followOriginalHttpMethod + } } Redirect.prototype.redirectTo = function (response) { @@ -115,7 +119,7 @@ Redirect.prototype.onResponse = function (response) { ) if (self.followAllRedirects && request.method !== 'HEAD' && response.statusCode !== 401 && response.statusCode !== 307) { - request.method = 'GET' + request.method = self.followOriginalHttpMethod ? 
request.method : 'GET' } // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215 delete request.src diff --git a/deps/npm/node_modules/request/node_modules/aws4/README.md b/deps/npm/node_modules/request/node_modules/aws4/README.md index 6c55da80528c74..6b002d02f752a6 100644 --- a/deps/npm/node_modules/request/node_modules/aws4/README.md +++ b/deps/npm/node_modules/request/node_modules/aws4/README.md @@ -434,6 +434,15 @@ request(aws4.sign({ /* (HTTP 202, empty response) */ + +// Generate CodeCommit Git access password +var signer = new aws4.RequestSigner({ + service: 'codecommit', + host: 'git-codecommit.us-east-1.amazonaws.com', + method: 'GIT', + path: '/v1/repos/MyAwesomeRepo', +}) +var password = signer.getDateTime() + 'Z' + signer.signature() ``` API diff --git a/deps/npm/node_modules/request/node_modules/aws4/aws4.js b/deps/npm/node_modules/request/node_modules/aws4/aws4.js index cbe5dc90494efe..a54318065b3ed3 100644 --- a/deps/npm/node_modules/request/node_modules/aws4/aws4.js +++ b/deps/npm/node_modules/request/node_modules/aws4/aws4.js @@ -52,6 +52,8 @@ function RequestSigner(request, credentials) { } if (!request.hostname && !request.host) request.hostname = headers.Host || headers.host + + this.isCodeCommitGit = this.service === 'codecommit' && request.method === 'GIT' } RequestSigner.prototype.matchHost = function(host) { @@ -109,7 +111,7 @@ RequestSigner.prototype.prepareRequest = function() { } else { - if (!request.doNotModifyHeaders) { + if (!request.doNotModifyHeaders && !this.isCodeCommitGit) { if (request.body && !headers['Content-Type'] && !headers['content-type']) headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8' @@ -153,6 +155,9 @@ RequestSigner.prototype.getDateTime = function() { date = new Date(headers.Date || headers.date || new Date) this.datetime = date.toISOString().replace(/[:\-]|\.\d{3}/g, '') + + // Remove the trailing 'Z' on the timestamp string for CodeCommit git access + if (this.isCodeCommitGit) this.datetime = this.datetime.slice(0, -1) } return this.datetime } @@ -202,8 +207,8 @@ RequestSigner.prototype.canonicalString = function() { decodePath = this.service === 's3' || this.request.doNotEncodePath, decodeSlashesInPath = this.service === 's3', firstValOnly = this.service === 's3', - bodyHash = this.service === 's3' && this.request.signQuery ? - 'UNSIGNED-PAYLOAD' : hash(this.request.body || '', 'hex') + bodyHash = this.service === 's3' && this.request.signQuery ? 'UNSIGNED-PAYLOAD' : + (this.isCodeCommitGit ? 
'' : hash(this.request.body || '', 'hex')) if (query) { queryStr = encodeRfc3986(querystring.stringify(Object.keys(query).sort().reduce(function(obj, key) { diff --git a/deps/npm/node_modules/request/node_modules/aws4/package.json b/deps/npm/node_modules/request/node_modules/aws4/package.json index 7151d7aae931f2..4e7caf0d301424 100644 --- a/deps/npm/node_modules/request/node_modules/aws4/package.json +++ b/deps/npm/node_modules/request/node_modules/aws4/package.json @@ -10,24 +10,23 @@ "spec": ">=1.2.1 <2.0.0", "type": "range" }, - "/Users/rebecca/code/npm/node_modules/request" + "/Users/ogd/Documents/projects/npm/npm/node_modules/request" ] ], "_from": "aws4@>=1.2.1 <2.0.0", - "_id": "aws4@1.4.1", + "_id": "aws4@1.5.0", "_inCache": true, - "_installable": true, "_location": "/request/aws4", - "_nodeVersion": "4.4.3", + "_nodeVersion": "4.5.0", "_npmOperationalInternal": { - "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/aws4-1.4.1.tgz_1462643218465_0.6527479749638587" + "host": "packages-16-east.internal.npmjs.com", + "tmp": "tmp/aws4-1.5.0.tgz_1476226259635_0.2796843808609992" }, "_npmUser": { "name": "hichaelmart", "email": "michael.hart.au@gmail.com" }, - "_npmVersion": "2.15.4", + "_npmVersion": "2.15.11", "_phantomChildren": {}, "_requested": { "raw": "aws4@^1.2.1", @@ -41,11 +40,11 @@ "_requiredBy": [ "/request" ], - "_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.4.1.tgz", - "_shasum": "fde7d5292466d230e5ee0f4e038d9dfaab08fc61", + "_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz", + "_shasum": "0a29ffb79c31c9e712eeb087e8e7a64b4a56d755", "_shrinkwrap": null, "_spec": "aws4@^1.2.1", - "_where": "/Users/rebecca/code/npm/node_modules/request", + "_where": "/Users/ogd/Documents/projects/npm/npm/node_modules/request", "author": { "name": "Michael Hart", "email": "michael.hart.au@gmail.com", @@ -62,10 +61,10 @@ }, "directories": {}, "dist": { - "shasum": "fde7d5292466d230e5ee0f4e038d9dfaab08fc61", - "tarball": "https://registry.npmjs.org/aws4/-/aws4-1.4.1.tgz" + "shasum": "0a29ffb79c31c9e712eeb087e8e7a64b4a56d755", + "tarball": "https://registry.npmjs.org/aws4/-/aws4-1.5.0.tgz" }, - "gitHead": "f126d3ff80be1ddde0fc6b50bb51a7f199547e81", + "gitHead": "ba136334ee08884c6042c8578a22e376233eef34", "homepage": "https://github.com/mhart/aws4#readme", "keywords": [ "amazon", @@ -137,5 +136,5 @@ "scripts": { "test": "mocha ./test/fast.js ./test/slow.js -b -t 100s -R list" }, - "version": "1.4.1" + "version": "1.5.0" } diff --git a/deps/npm/node_modules/request/node_modules/bl/.jshintrc b/deps/npm/node_modules/request/node_modules/bl/.jshintrc deleted file mode 100644 index c8ef3ca4097f82..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/.jshintrc +++ /dev/null @@ -1,59 +0,0 @@ -{ - "predef": [ ] - , "bitwise": false - , "camelcase": false - , "curly": false - , "eqeqeq": false - , "forin": false - , "immed": false - , "latedef": false - , "noarg": true - , "noempty": true - , "nonew": true - , "plusplus": false - , "quotmark": true - , "regexp": false - , "undef": true - , "unused": true - , "strict": false - , "trailing": true - , "maxlen": 120 - , "asi": true - , "boss": true - , "debug": true - , "eqnull": true - , "esnext": true - , "evil": true - , "expr": true - , "funcscope": false - , "globalstrict": false - , "iterator": false - , "lastsemic": true - , "laxbreak": true - , "laxcomma": true - , "loopfunc": true - , "multistr": false - , "onecase": false - , "proto": false - , "regexdash": false - , "scripturl": true - , "smarttabs": 
false - , "shadow": false - , "sub": true - , "supernew": false - , "validthis": true - , "browser": true - , "couch": false - , "devel": false - , "dojo": false - , "mootools": false - , "node": true - , "nonstandard": true - , "prototypejs": false - , "rhino": false - , "worker": true - , "wsh": false - , "nomen": false - , "onevar": false - , "passfail": false -} \ No newline at end of file diff --git a/deps/npm/node_modules/request/node_modules/bl/.npmignore b/deps/npm/node_modules/request/node_modules/bl/.npmignore deleted file mode 100644 index 40b878db5b1c97..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules/ \ No newline at end of file diff --git a/deps/npm/node_modules/request/node_modules/bl/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/.travis.yml deleted file mode 100644 index 5cb0480b45457a..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -sudo: false -language: node_js -node_js: - - '0.10' - - '0.12' - - '4' - - '5' -branches: - only: - - master -notifications: - email: - - rod@vagg.org diff --git a/deps/npm/node_modules/request/node_modules/bl/README.md b/deps/npm/node_modules/request/node_modules/bl/README.md deleted file mode 100644 index f7044db26e8659..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/README.md +++ /dev/null @@ -1,200 +0,0 @@ -# bl *(BufferList)* - -[![Build Status](https://travis-ci.org/rvagg/bl.svg?branch=master)](https://travis-ci.org/rvagg/bl) - -**A Node.js Buffer list collector, reader and streamer thingy.** - -[![NPM](https://nodei.co/npm/bl.png?downloads=true&downloadRank=true)](https://nodei.co/npm/bl/) -[![NPM](https://nodei.co/npm-dl/bl.png?months=6&height=3)](https://nodei.co/npm/bl/) - -**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! - -The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. - -```js -const BufferList = require('bl') - -var bl = new BufferList() -bl.append(new Buffer('abcd')) -bl.append(new Buffer('efg')) -bl.append('hi') // bl will also accept & convert Strings -bl.append(new Buffer('j')) -bl.append(new Buffer([ 0x3, 0x4 ])) - -console.log(bl.length) // 12 - -console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' -console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' -console.log(bl.slice(3, 6).toString('ascii')) // 'def' -console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' -console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' - -// or just use toString! 
-console.log(bl.toString()) // 'abcdefghij\u0003\u0004' -console.log(bl.toString('ascii', 3, 8)) // 'defgh' -console.log(bl.toString('ascii', 5, 10)) // 'fghij' - -// other standard Buffer readables -console.log(bl.readUInt16BE(10)) // 0x0304 -console.log(bl.readUInt16LE(10)) // 0x0403 -``` - -Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: - -```js -const bl = require('bl') - , fs = require('fs') - -fs.createReadStream('README.md') - .pipe(bl(function (err, data) { // note 'new' isn't strictly required - // `data` is a complete Buffer object containing the full data - console.log(data.toString()) - })) -``` - -Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. - -Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): -```js -const hyperquest = require('hyperquest') - , bl = require('bl') - , url = 'https://raw.github.com/rvagg/bl/master/README.md' - -hyperquest(url).pipe(bl(function (err, data) { - console.log(data.toString()) -})) -``` - -Or, use it as a readable stream to recompose a list of Buffers to an output source: - -```js -const BufferList = require('bl') - , fs = require('fs') - -var bl = new BufferList() -bl.append(new Buffer('abcd')) -bl.append(new Buffer('efg')) -bl.append(new Buffer('hi')) -bl.append(new Buffer('j')) - -bl.pipe(fs.createWriteStream('gibberish.txt')) -``` - -## API - - * new BufferList([ callback ]) - * bl.length - * bl.append(buffer) - * bl.get(index) - * bl.slice([ start[, end ] ]) - * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) - * bl.duplicate() - * bl.consume(bytes) - * bl.toString([encoding, [ start, [ end ]]]) - * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() - * Streams - --------------------------------------------------------- - -### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) -The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. - -Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object. - -`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: - -```js -var bl = require('bl') -var myinstance = bl() - -// equivilant to: - -var BufferList = require('bl') -var myinstance = new BufferList() -``` - --------------------------------------------------------- - -### bl.length -Get the length of the list in bytes. 
This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. - --------------------------------------------------------- - -### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) -`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. - --------------------------------------------------------- - -### bl.get(index) -`get()` will return the byte at the specified index. - --------------------------------------------------------- - -### bl.slice([ start, [ end ] ]) -`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. - -If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. - --------------------------------------------------------- - -### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) -`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. - --------------------------------------------------------- - -### bl.duplicate() -`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example: - -```js -var bl = new BufferList() - -bl.append('hello') -bl.append(' world') -bl.append('\n') - -bl.duplicate().pipe(process.stdout, { end: false }) - -console.log(bl.toString()) -``` - --------------------------------------------------------- - -### bl.consume(bytes) -`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers—initial offsets will be calculated accordingly in order to give you a consistent view of the data. - --------------------------------------------------------- - -### bl.toString([encoding, [ start, [ end ]]]) -`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. - --------------------------------------------------------- - -### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() - -All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. 
- -See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. - --------------------------------------------------------- - -### Streams -**bl** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **bl** instance. - --------------------------------------------------------- - -## Contributors - -**bl** is brought to you by the following hackers: - - * [Rod Vagg](https://github.com/rvagg) - * [Matteo Collina](https://github.com/mcollina) - * [Jarett Cruger](https://github.com/jcrugzz) - -======= - - -## License & copyright - -Copyright (c) 2013-2014 bl contributors (listed above). - -bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. diff --git a/deps/npm/node_modules/request/node_modules/bl/bl.js b/deps/npm/node_modules/request/node_modules/bl/bl.js deleted file mode 100644 index f585df1721798f..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/bl.js +++ /dev/null @@ -1,243 +0,0 @@ -var DuplexStream = require('readable-stream/duplex') - , util = require('util') - - -function BufferList (callback) { - if (!(this instanceof BufferList)) - return new BufferList(callback) - - this._bufs = [] - this.length = 0 - - if (typeof callback == 'function') { - this._callback = callback - - var piper = function piper (err) { - if (this._callback) { - this._callback(err) - this._callback = null - } - }.bind(this) - - this.on('pipe', function onPipe (src) { - src.on('error', piper) - }) - this.on('unpipe', function onUnpipe (src) { - src.removeListener('error', piper) - }) - } else { - this.append(callback) - } - - DuplexStream.call(this) -} - - -util.inherits(BufferList, DuplexStream) - - -BufferList.prototype._offset = function _offset (offset) { - var tot = 0, i = 0, _t - for (; i < this._bufs.length; i++) { - _t = tot + this._bufs[i].length - if (offset < _t) - return [ i, offset - tot ] - tot = _t - } -} - - -BufferList.prototype.append = function append (buf) { - var i = 0 - , newBuf - - if (Array.isArray(buf)) { - for (; i < buf.length; i++) - this.append(buf[i]) - } else if (buf instanceof BufferList) { - // unwrap argument into individual BufferLists - for (; i < buf._bufs.length; i++) - this.append(buf._bufs[i]) - } else if (buf != null) { - // coerce number arguments to strings, since Buffer(number) does - // uninitialized memory allocation - if (typeof buf == 'number') - buf = buf.toString() - - newBuf = Buffer.isBuffer(buf) ? 
buf : new Buffer(buf) - this._bufs.push(newBuf) - this.length += newBuf.length - } - - return this -} - - -BufferList.prototype._write = function _write (buf, encoding, callback) { - this.append(buf) - - if (typeof callback == 'function') - callback() -} - - -BufferList.prototype._read = function _read (size) { - if (!this.length) - return this.push(null) - - size = Math.min(size, this.length) - this.push(this.slice(0, size)) - this.consume(size) -} - - -BufferList.prototype.end = function end (chunk) { - DuplexStream.prototype.end.call(this, chunk) - - if (this._callback) { - this._callback(null, this.slice()) - this._callback = null - } -} - - -BufferList.prototype.get = function get (index) { - return this.slice(index, index + 1)[0] -} - - -BufferList.prototype.slice = function slice (start, end) { - return this.copy(null, 0, start, end) -} - - -BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { - if (typeof srcStart != 'number' || srcStart < 0) - srcStart = 0 - if (typeof srcEnd != 'number' || srcEnd > this.length) - srcEnd = this.length - if (srcStart >= this.length) - return dst || new Buffer(0) - if (srcEnd <= 0) - return dst || new Buffer(0) - - var copy = !!dst - , off = this._offset(srcStart) - , len = srcEnd - srcStart - , bytes = len - , bufoff = (copy && dstStart) || 0 - , start = off[1] - , l - , i - - // copy/slice everything - if (srcStart === 0 && srcEnd == this.length) { - if (!copy) // slice, just return a full concat - return Buffer.concat(this._bufs) - - // copy, need to copy individual buffers - for (i = 0; i < this._bufs.length; i++) { - this._bufs[i].copy(dst, bufoff) - bufoff += this._bufs[i].length - } - - return dst - } - - // easy, cheap case where it's a subset of one of the buffers - if (bytes <= this._bufs[off[0]].length - start) { - return copy - ? 
this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) - : this._bufs[off[0]].slice(start, start + bytes) - } - - if (!copy) // a slice, we need something to copy in to - dst = new Buffer(len) - - for (i = off[0]; i < this._bufs.length; i++) { - l = this._bufs[i].length - start - - if (bytes > l) { - this._bufs[i].copy(dst, bufoff, start) - } else { - this._bufs[i].copy(dst, bufoff, start, start + bytes) - break - } - - bufoff += l - bytes -= l - - if (start) - start = 0 - } - - return dst -} - -BufferList.prototype.toString = function toString (encoding, start, end) { - return this.slice(start, end).toString(encoding) -} - -BufferList.prototype.consume = function consume (bytes) { - while (this._bufs.length) { - if (bytes >= this._bufs[0].length) { - bytes -= this._bufs[0].length - this.length -= this._bufs[0].length - this._bufs.shift() - } else { - this._bufs[0] = this._bufs[0].slice(bytes) - this.length -= bytes - break - } - } - return this -} - - -BufferList.prototype.duplicate = function duplicate () { - var i = 0 - , copy = new BufferList() - - for (; i < this._bufs.length; i++) - copy.append(this._bufs[i]) - - return copy -} - - -BufferList.prototype.destroy = function destroy () { - this._bufs.length = 0 - this.length = 0 - this.push(null) -} - - -;(function () { - var methods = { - 'readDoubleBE' : 8 - , 'readDoubleLE' : 8 - , 'readFloatBE' : 4 - , 'readFloatLE' : 4 - , 'readInt32BE' : 4 - , 'readInt32LE' : 4 - , 'readUInt32BE' : 4 - , 'readUInt32LE' : 4 - , 'readInt16BE' : 2 - , 'readInt16LE' : 2 - , 'readUInt16BE' : 2 - , 'readUInt16LE' : 2 - , 'readInt8' : 1 - , 'readUInt8' : 1 - } - - for (var m in methods) { - (function (m) { - BufferList.prototype[m] = function (offset) { - return this.slice(offset, offset + methods[m])[m](0) - } - }(m)) - } -}()) - - -module.exports = BufferList diff --git a/deps/npm/node_modules/request/node_modules/bl/package.json b/deps/npm/node_modules/request/node_modules/bl/package.json deleted file mode 100644 index 47862b0b623691..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/package.json +++ /dev/null @@ -1,100 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "bl@~1.1.2", - "scope": null, - "escapedName": "bl", - "name": "bl", - "rawSpec": "~1.1.2", - "spec": ">=1.1.2 <1.2.0", - "type": "range" - }, - "/Users/rebecca/code/npm/node_modules/request" - ] - ], - "_from": "bl@>=1.1.2 <1.2.0", - "_id": "bl@1.1.2", - "_inCache": true, - "_installable": true, - "_location": "/request/bl", - "_nodeVersion": "5.3.0", - "_npmOperationalInternal": { - "host": "packages-9-west.internal.npmjs.com", - "tmp": "tmp/bl-1.1.2.tgz_1455246621698_0.6300242957659066" - }, - "_npmUser": { - "name": "rvagg", - "email": "rod@vagg.org" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": { - "inherits": "2.0.1" - }, - "_requested": { - "raw": "bl@~1.1.2", - "scope": null, - "escapedName": "bl", - "name": "bl", - "rawSpec": "~1.1.2", - "spec": ">=1.1.2 <1.2.0", - "type": "range" - }, - "_requiredBy": [ - "/request" - ], - "_resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz", - "_shasum": "fdca871a99713aa00d19e3bbba41c44787a65398", - "_shrinkwrap": null, - "_spec": "bl@~1.1.2", - "_where": "/Users/rebecca/code/npm/node_modules/request", - "authors": [ - "Rod Vagg (https://github.com/rvagg)", - "Matteo Collina (https://github.com/mcollina)", - "Jarett Cruger (https://github.com/jcrugzz)" - ], - "bugs": { - "url": "https://github.com/rvagg/bl/issues" - }, - "dependencies": { - "readable-stream": "~2.0.5" - }, - "description": "Buffer List: collect 
buffers and access with a standard readable Buffer interface, streamable too!", - "devDependencies": { - "faucet": "0.0.1", - "hash_file": "~0.1.1", - "tape": "~4.4.0" - }, - "directories": {}, - "dist": { - "shasum": "fdca871a99713aa00d19e3bbba41c44787a65398", - "tarball": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz" - }, - "gitHead": "ea42021059dc65fc60d7f4b9217c73431f09d23d", - "homepage": "https://github.com/rvagg/bl", - "keywords": [ - "buffer", - "buffers", - "stream", - "awesomesauce" - ], - "license": "MIT", - "main": "bl.js", - "maintainers": [ - { - "name": "rvagg", - "email": "rod@vagg.org" - } - ], - "name": "bl", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/rvagg/bl.git" - }, - "scripts": { - "test": "node test/test.js | faucet" - }, - "version": "1.1.2" -} diff --git a/deps/npm/node_modules/request/node_modules/bl/test/test.js b/deps/npm/node_modules/request/node_modules/bl/test/test.js deleted file mode 100644 index c95b1ba4844ef7..00000000000000 --- a/deps/npm/node_modules/request/node_modules/bl/test/test.js +++ /dev/null @@ -1,640 +0,0 @@ -var tape = require('tape') - , crypto = require('crypto') - , fs = require('fs') - , hash = require('hash_file') - , BufferList = require('../') - - , encodings = - ('hex utf8 utf-8 ascii binary base64' - + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') - -tape('single bytes from single buffer', function (t) { - var bl = new BufferList() - bl.append(new Buffer('abcd')) - - t.equal(bl.length, 4) - - t.equal(bl.get(0), 97) - t.equal(bl.get(1), 98) - t.equal(bl.get(2), 99) - t.equal(bl.get(3), 100) - - t.end() -}) - -tape('single bytes from multiple buffers', function (t) { - var bl = new BufferList() - bl.append(new Buffer('abcd')) - bl.append(new Buffer('efg')) - bl.append(new Buffer('hi')) - bl.append(new Buffer('j')) - - t.equal(bl.length, 10) - - t.equal(bl.get(0), 97) - t.equal(bl.get(1), 98) - t.equal(bl.get(2), 99) - t.equal(bl.get(3), 100) - t.equal(bl.get(4), 101) - t.equal(bl.get(5), 102) - t.equal(bl.get(6), 103) - t.equal(bl.get(7), 104) - t.equal(bl.get(8), 105) - t.equal(bl.get(9), 106) - t.end() -}) - -tape('multi bytes from single buffer', function (t) { - var bl = new BufferList() - bl.append(new Buffer('abcd')) - - t.equal(bl.length, 4) - - t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') - t.equal(bl.slice(0, 3).toString('ascii'), 'abc') - t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') - - t.end() -}) - -tape('multiple bytes from multiple buffers', function (t) { - var bl = new BufferList() - - bl.append(new Buffer('abcd')) - bl.append(new Buffer('efg')) - bl.append(new Buffer('hi')) - bl.append(new Buffer('j')) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - - t.end() -}) - -tape('multiple bytes from multiple buffer lists', function (t) { - var bl = new BufferList() - - bl.append(new BufferList([ new Buffer('abcd'), new Buffer('efg') ])) - bl.append(new BufferList([ new Buffer('hi'), new Buffer('j') ])) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - t.equal(bl.slice(3, 8).toString('ascii'), 
'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - - t.end() -}) - -// same data as previous test, just using nested constructors -tape('multiple bytes from crazy nested buffer lists', function (t) { - var bl = new BufferList() - - bl.append(new BufferList([ - new BufferList([ - new BufferList(new Buffer('abc')) - , new Buffer('d') - , new BufferList(new Buffer('efg')) - ]) - , new BufferList([ new Buffer('hi') ]) - , new BufferList(new Buffer('j')) - ])) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') - t.equal(bl.slice(3, 6).toString('ascii'), 'def') - t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') - t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') - - t.end() -}) - -tape('append accepts arrays of Buffers', function (t) { - var bl = new BufferList() - bl.append(new Buffer('abc')) - bl.append([ new Buffer('def') ]) - bl.append([ new Buffer('ghi'), new Buffer('jkl') ]) - bl.append([ new Buffer('mnop'), new Buffer('qrstu'), new Buffer('vwxyz') ]) - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - t.end() -}) - -tape('append accepts arrays of BufferLists', function (t) { - var bl = new BufferList() - bl.append(new Buffer('abc')) - bl.append([ new BufferList('def') ]) - bl.append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ])) - bl.append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ]) - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - t.end() -}) - -tape('append chainable', function (t) { - var bl = new BufferList() - t.ok(bl.append(new Buffer('abcd')) === bl) - t.ok(bl.append([ new Buffer('abcd') ]) === bl) - t.ok(bl.append(new BufferList(new Buffer('abcd'))) === bl) - t.ok(bl.append([ new BufferList(new Buffer('abcd')) ]) === bl) - t.end() -}) - -tape('append chainable (test results)', function (t) { - var bl = new BufferList('abc') - .append([ new BufferList('def') ]) - .append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ])) - .append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ]) - - t.equal(bl.length, 26) - t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') - t.end() -}) - -tape('consuming from multiple buffers', function (t) { - var bl = new BufferList() - - bl.append(new Buffer('abcd')) - bl.append(new Buffer('efg')) - bl.append(new Buffer('hi')) - bl.append(new Buffer('j')) - - t.equal(bl.length, 10) - - t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') - - bl.consume(3) - t.equal(bl.length, 7) - t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') - - bl.consume(2) - t.equal(bl.length, 5) - t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') - - bl.consume(1) - t.equal(bl.length, 4) - t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') - - bl.consume(1) - t.equal(bl.length, 3) - t.equal(bl.slice(0, 3).toString('ascii'), 'hij') - - bl.consume(2) - t.equal(bl.length, 1) - t.equal(bl.slice(0, 1).toString('ascii'), 'j') - - t.end() -}) - -tape('complete consumption', function (t) { - var bl = new BufferList() - - bl.append(new Buffer('a')) - bl.append(new Buffer('b')) - - bl.consume(2) - - t.equal(bl.length, 0) - t.equal(bl._bufs.length, 0) - - t.end() -}) - -tape('test readUInt8 / readInt8', function (t) { - var buf1 = new Buffer(1) - , buf2 = new Buffer(3) - , buf3 = new Buffer(3) - , bl = new BufferList() - - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 
0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt8(2), 0x3) - t.equal(bl.readInt8(2), 0x3) - t.equal(bl.readUInt8(3), 0x4) - t.equal(bl.readInt8(3), 0x4) - t.equal(bl.readUInt8(4), 0x23) - t.equal(bl.readInt8(4), 0x23) - t.equal(bl.readUInt8(5), 0x42) - t.equal(bl.readInt8(5), 0x42) - t.end() -}) - -tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { - var buf1 = new Buffer(1) - , buf2 = new Buffer(3) - , buf3 = new Buffer(3) - , bl = new BufferList() - - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt16BE(2), 0x0304) - t.equal(bl.readUInt16LE(2), 0x0403) - t.equal(bl.readInt16BE(2), 0x0304) - t.equal(bl.readInt16LE(2), 0x0403) - t.equal(bl.readUInt16BE(3), 0x0423) - t.equal(bl.readUInt16LE(3), 0x2304) - t.equal(bl.readInt16BE(3), 0x0423) - t.equal(bl.readInt16LE(3), 0x2304) - t.equal(bl.readUInt16BE(4), 0x2342) - t.equal(bl.readUInt16LE(4), 0x4223) - t.equal(bl.readInt16BE(4), 0x2342) - t.equal(bl.readInt16LE(4), 0x4223) - t.end() -}) - -tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { - var buf1 = new Buffer(1) - , buf2 = new Buffer(3) - , buf3 = new Buffer(3) - , bl = new BufferList() - - buf2[1] = 0x3 - buf2[2] = 0x4 - buf3[0] = 0x23 - buf3[1] = 0x42 - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readUInt32BE(2), 0x03042342) - t.equal(bl.readUInt32LE(2), 0x42230403) - t.equal(bl.readInt32BE(2), 0x03042342) - t.equal(bl.readInt32LE(2), 0x42230403) - t.end() -}) - -tape('test readFloatLE / readFloatBE', function (t) { - var buf1 = new Buffer(1) - , buf2 = new Buffer(3) - , buf3 = new Buffer(3) - , bl = new BufferList() - - buf2[1] = 0x00 - buf2[2] = 0x00 - buf3[0] = 0x80 - buf3[1] = 0x3f - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readFloatLE(2), 0x01) - t.end() -}) - -tape('test readDoubleLE / readDoubleBE', function (t) { - var buf1 = new Buffer(1) - , buf2 = new Buffer(3) - , buf3 = new Buffer(10) - , bl = new BufferList() - - buf2[1] = 0x55 - buf2[2] = 0x55 - buf3[0] = 0x55 - buf3[1] = 0x55 - buf3[2] = 0x55 - buf3[3] = 0x55 - buf3[4] = 0xd5 - buf3[5] = 0x3f - - bl.append(buf1) - bl.append(buf2) - bl.append(buf3) - - t.equal(bl.readDoubleLE(2), 0.3333333333333333) - t.end() -}) - -tape('test toString', function (t) { - var bl = new BufferList() - - bl.append(new Buffer('abcd')) - bl.append(new Buffer('efg')) - bl.append(new Buffer('hi')) - bl.append(new Buffer('j')) - - t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') - t.equal(bl.toString('ascii', 3, 10), 'defghij') - t.equal(bl.toString('ascii', 3, 6), 'def') - t.equal(bl.toString('ascii', 3, 8), 'defgh') - t.equal(bl.toString('ascii', 5, 10), 'fghij') - - t.end() -}) - -tape('test toString encoding', function (t) { - var bl = new BufferList() - , b = new Buffer('abcdefghij\xff\x00') - - bl.append(new Buffer('abcd')) - bl.append(new Buffer('efg')) - bl.append(new Buffer('hi')) - bl.append(new Buffer('j')) - bl.append(new Buffer('\xff\x00')) - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc), enc) - }) - - t.end() -}) - -!process.browser && tape('test stream', function (t) { - var random = crypto.randomBytes(65534) - , rndhash = hash(random, 'md5') - , md5sum = crypto.createHash('md5') - , bl = new BufferList(function (err, buf) { - t.ok(Buffer.isBuffer(buf)) - t.ok(err === null) - t.equal(rndhash, hash(bl.slice(), 'md5')) - 
t.equal(rndhash, hash(buf, 'md5')) - - bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) - .on('close', function () { - var s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') - s.on('data', md5sum.update.bind(md5sum)) - s.on('end', function() { - t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') - t.end() - }) - }) - - }) - - fs.writeFileSync('/tmp/bl_test_rnd.dat', random) - fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) -}) - -tape('instantiation with Buffer', function (t) { - var buf = crypto.randomBytes(1024) - , buf2 = crypto.randomBytes(1024) - , b = BufferList(buf) - - t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') - b = BufferList([ buf, buf2 ]) - t.equal(b.slice().toString('hex'), Buffer.concat([ buf, buf2 ]).toString('hex'), 'same buffer') - t.end() -}) - -tape('test String appendage', function (t) { - var bl = new BufferList() - , b = new Buffer('abcdefghij\xff\x00') - - bl.append('abcd') - bl.append('efg') - bl.append('hi') - bl.append('j') - bl.append('\xff\x00') - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc)) - }) - - t.end() -}) - -tape('test Number appendage', function (t) { - var bl = new BufferList() - , b = new Buffer('1234567890') - - bl.append(1234) - bl.append(567) - bl.append(89) - bl.append(0) - - encodings.forEach(function (enc) { - t.equal(bl.toString(enc), b.toString(enc)) - }) - - t.end() -}) - -tape('write nothing, should get empty buffer', function (t) { - t.plan(3) - BufferList(function (err, data) { - t.notOk(err, 'no error') - t.ok(Buffer.isBuffer(data), 'got a buffer') - t.equal(0, data.length, 'got a zero-length buffer') - t.end() - }).end() -}) - -tape('unicode string', function (t) { - t.plan(2) - var inp1 = '\u2600' - , inp2 = '\u2603' - , exp = inp1 + ' and ' + inp2 - , bl = BufferList() - bl.write(inp1) - bl.write(' and ') - bl.write(inp2) - t.equal(exp, bl.toString()) - t.equal(new Buffer(exp).toString('hex'), bl.toString('hex')) -}) - -tape('should emit finish', function (t) { - var source = BufferList() - , dest = BufferList() - - source.write('hello') - source.pipe(dest) - - dest.on('finish', function () { - t.equal(dest.toString('utf8'), 'hello') - t.end() - }) -}) - -tape('basic copy', function (t) { - var buf = crypto.randomBytes(1024) - , buf2 = new Buffer(1024) - , b = BufferList(buf) - - b.copy(buf2) - t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') - t.end() -}) - -tape('copy after many appends', function (t) { - var buf = crypto.randomBytes(512) - , buf2 = new Buffer(1024) - , b = BufferList(buf) - - b.append(buf) - b.copy(buf2) - t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') - t.end() -}) - -tape('copy at a precise position', function (t) { - var buf = crypto.randomBytes(1004) - , buf2 = new Buffer(1024) - , b = BufferList(buf) - - b.copy(buf2, 20) - t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') - t.end() -}) - -tape('copy starting from a precise location', function (t) { - var buf = crypto.randomBytes(10) - , buf2 = new Buffer(5) - , b = BufferList(buf) - - b.copy(buf2, 0, 5) - t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') - t.end() -}) - -tape('copy in an interval', function (t) { - var rnd = crypto.randomBytes(10) - , b = BufferList(rnd) // put the random bytes there - , actual = new Buffer(3) - , expected = new Buffer(3) - - rnd.copy(expected, 0, 5, 8) - b.copy(actual, 0, 5, 8) - - t.equal(actual.toString('hex'), 
expected.toString('hex'), 'same buffer') - t.end() -}) - -tape('copy an interval between two buffers', function (t) { - var buf = crypto.randomBytes(10) - , buf2 = new Buffer(10) - , b = BufferList(buf) - - b.append(buf) - b.copy(buf2, 0, 5, 15) - - t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') - t.end() -}) - -tape('duplicate', function (t) { - t.plan(2) - - var bl = new BufferList('abcdefghij\xff\x00') - , dup = bl.duplicate() - - t.equal(bl.prototype, dup.prototype) - t.equal(bl.toString('hex'), dup.toString('hex')) -}) - -tape('destroy no pipe', function (t) { - t.plan(2) - - var bl = new BufferList('alsdkfja;lsdkfja;lsdk') - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) -}) - -!process.browser && tape('destroy with pipe before read end', function (t) { - t.plan(2) - - var bl = new BufferList() - fs.createReadStream(__dirname + '/test.js') - .pipe(bl) - - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) - -}) - -!process.browser && tape('destroy with pipe before read end with race', function (t) { - t.plan(2) - - var bl = new BufferList() - fs.createReadStream(__dirname + '/test.js') - .pipe(bl) - - setTimeout(function () { - bl.destroy() - setTimeout(function () { - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) - }, 500) - }, 500) -}) - -!process.browser && tape('destroy with pipe after read end', function (t) { - t.plan(2) - - var bl = new BufferList() - fs.createReadStream(__dirname + '/test.js') - .on('end', onEnd) - .pipe(bl) - - function onEnd () { - bl.destroy() - - t.equal(bl._bufs.length, 0) - t.equal(bl.length, 0) - } -}) - -!process.browser && tape('destroy with pipe while writing to a destination', function (t) { - t.plan(4) - - var bl = new BufferList() - , ds = new BufferList() - - fs.createReadStream(__dirname + '/test.js') - .on('end', onEnd) - .pipe(bl) - - function onEnd () { - bl.pipe(ds) - - setTimeout(function () { - bl.destroy() - - t.equals(bl._bufs.length, 0) - t.equals(bl.length, 0) - - ds.destroy() - - t.equals(bl._bufs.length, 0) - t.equals(bl.length, 0) - - }, 100) - } -}) - -!process.browser && tape('handle error', function (t) { - t.plan(2) - fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { - t.ok(err instanceof Error, 'has error') - t.notOk(data, 'no data') - })) -}) diff --git a/deps/npm/node_modules/request/node_modules/form-data/Readme.md b/deps/npm/node_modules/request/node_modules/form-data/Readme.md index 5864d82db56862..642a9d14a800b7 100644 --- a/deps/npm/node_modules/request/node_modules/form-data/Readme.md +++ b/deps/npm/node_modules/request/node_modules/form-data/Readme.md @@ -6,11 +6,11 @@ The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface] [xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface -[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.0.0.svg?label=linux:0.12-6.x)](https://travis-ci.org/form-data/form-data) -[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.0.0.svg?label=macos:0.12-6.x)](https://travis-ci.org/form-data/form-data) -[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.0.0.svg?label=windows:0.12-6.x)](https://ci.appveyor.com/project/alexindigo/form-data) +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=linux:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![MacOS 
Build](https://img.shields.io/travis/form-data/form-data/v2.1.2.svg?label=macos:0.12-6.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/form-data/v2.1.2.svg?label=windows:0.12-6.x)](https://ci.appveyor.com/project/alexindigo/form-data) -[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.1.2.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) [![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) [![bitHound Overall Score](https://www.bithound.io/github/form-data/form-data/badges/score.svg)](https://www.bithound.io/github/form-data/form-data) diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js index f42221cd63da73..09e7c70e6e9d78 100644 --- a/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js +++ b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js @@ -1,2 +1,2 @@ /* eslint-env browser */ -module.exports = window.FormData; +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js index 8252e6588d4949..d2cc924918b353 100644 --- a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js +++ b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js @@ -325,7 +325,7 @@ FormData.prototype.getLengthSync = function() { } // https://github.com/form-data/form-data/issues/40 - if (this._valuesToMeasure.length) { + if (!this.hasKnownLength()) { // Some async length retrievers are present // therefore synchronous length calculation is false. 
// Please use getLength(callback) to get proper length @@ -335,6 +335,19 @@ FormData.prototype.getLengthSync = function() { return knownLength; }; +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + FormData.prototype.getLength = function(cb) { var knownLength = this._overheadLength + this._valueLength; diff --git a/deps/npm/node_modules/request/node_modules/form-data/package.json b/deps/npm/node_modules/request/node_modules/form-data/package.json index a8b4839b7c7e69..96fd69e80f180d 100644 --- a/deps/npm/node_modules/request/node_modules/form-data/package.json +++ b/deps/npm/node_modules/request/node_modules/form-data/package.json @@ -2,49 +2,49 @@ "_args": [ [ { - "raw": "form-data@~2.0.0", + "raw": "form-data@~2.1.1", "scope": null, "escapedName": "form-data", "name": "form-data", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", + "rawSpec": "~2.1.1", + "spec": ">=2.1.1 <2.2.0", "type": "range" }, - "/Users/rebecca/code/npm/node_modules/request" + "/Users/zkat/Documents/code/npm/node_modules/request" ] ], - "_from": "form-data@>=2.0.0 <2.1.0", - "_id": "form-data@2.0.0", + "_from": "form-data@>=2.1.1 <2.2.0", + "_id": "form-data@2.1.2", "_inCache": true, "_location": "/request/form-data", - "_nodeVersion": "4.5.0", + "_nodeVersion": "6.4.0", "_npmOperationalInternal": { "host": "packages-12-west.internal.npmjs.com", - "tmp": "tmp/form-data-2.0.0.tgz_1474092617403_0.5404838663525879" + "tmp": "tmp/form-data-2.1.2.tgz_1478577739404_0.6574864208232611" }, "_npmUser": { "name": "alexindigo", "email": "iam@alexindigo.com" }, - "_npmVersion": "2.15.9", + "_npmVersion": "3.10.3", "_phantomChildren": {}, "_requested": { - "raw": "form-data@~2.0.0", + "raw": "form-data@~2.1.1", "scope": null, "escapedName": "form-data", "name": "form-data", - "rawSpec": "~2.0.0", - "spec": ">=2.0.0 <2.1.0", + "rawSpec": "~2.1.1", + "spec": ">=2.1.1 <2.2.0", "type": "range" }, "_requiredBy": [ "/request" ], - "_resolved": "https://registry.npmjs.org/form-data/-/form-data-2.0.0.tgz", - "_shasum": "6f0aebadcc5da16c13e1ecc11137d85f9b883b25", + "_resolved": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz", + "_shasum": "89c3534008b97eada4cbb157d58f6f5df025eae4", "_shrinkwrap": null, - "_spec": "form-data@~2.0.0", - "_where": "/Users/rebecca/code/npm/node_modules/request", + "_spec": "form-data@~2.1.1", + "_where": "/Users/zkat/Documents/code/npm/node_modules/request", "author": { "name": "Felix Geisendörfer", "email": "felix@debuggable.com", @@ -57,33 +57,38 @@ "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.5", - "mime-types": "^2.1.11" + "mime-types": "^2.1.12" }, "description": "A library to create readable \"multipart/form-data\" streams. 
Can be used to submit forms and file uploads to other web applications.", "devDependencies": { - "coveralls": "^2.11.13", - "cross-spawn": "^4.0.0", - "eslint": "^3.5.0", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^2.11.14", + "cross-spawn": "^4.0.2", + "eslint": "^3.9.1", "fake": "^0.2.2", "far": "^0.0.7", "formidable": "^1.0.17", "in-publish": "^2.0.0", "is-node-modern": "^1.0.0", "istanbul": "^0.4.5", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.13", "pkgfiles": "^2.3.0", "pre-commit": "^1.1.3", - "request": "^2.74.0", - "rimraf": "^2.5.4" + "request": "2.76.0", + "rimraf": "^2.5.4", + "tape": "^4.6.2" }, "directories": {}, "dist": { - "shasum": "6f0aebadcc5da16c13e1ecc11137d85f9b883b25", - "tarball": "https://registry.npmjs.org/form-data/-/form-data-2.0.0.tgz" + "shasum": "89c3534008b97eada4cbb157d58f6f5df025eae4", + "tarball": "https://registry.npmjs.org/form-data/-/form-data-2.1.2.tgz" }, "engines": { "node": ">= 0.12" }, - "gitHead": "652b16ff5b9077bdf65eb66b67286c823c2a1040", + "gitHead": "03444d21961a7a44cdc2eae11ee3630f6969023d", "homepage": "https://github.com/form-data/form-data#readme", "license": "MIT", "main": "./lib/form_data", @@ -109,7 +114,7 @@ "optionalDependencies": {}, "pre-commit": [ "lint", - "test", + "ci-test", "check" ], "readme": "ERROR: No README data found!", @@ -118,8 +123,10 @@ "url": "git://github.com/form-data/form-data.git" }, "scripts": { + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", "check": "istanbul check-coverage coverage/coverage*.json", - "ci-lint": "is-node-modern && npm run lint || is-node-not-modern", + "ci-lint": "is-node-modern 6 && npm run lint || is-node-not-modern 6", + "ci-test": "npm run test && npm run browser && npm run report", "debug": "verbose=1 ./test/run.js", "files": "pkgfiles --sort=name", "get-version": "node -e \"console.log(require('./package.json').version)\"", @@ -129,9 +136,10 @@ "predebug": "rimraf coverage test/tmp", "prepublish": "in-publish && npm run update-readme || not-in-publish", "pretest": "rimraf coverage test/tmp", + "report": "istanbul report lcov text", "restore-readme": "mv README.md.bak README.md", "test": "istanbul cover test/run.js", "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md" }, - "version": "2.0.0" + "version": "2.1.2" } diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/CHANGES.md b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/CHANGES.md new file mode 100644 index 00000000000000..d7c8f4ebe18de5 --- /dev/null +++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/CHANGES.md @@ -0,0 +1,364 @@ +# node-dashdash changelog + +## not yet released + +(nothing yet) + +## 1.14.1 + +- [issue #30] Change the output used by dashdash's Bash completion support to + indicate "there are no completions for this argument" to cope with different + sorting rules on different Bash/platforms. For example: + + $ triton -v -p test2 package get # before + ##-no -tritonpackage- completions-## + + $ triton -v -p test2 package get # after + ##-no-completion- -results-## + +## 1.14.0 + +- New `synopsisFromOpt(