From 0dc875e2c730221c349f35f2a7dd508f4732db0b Mon Sep 17 00:00:00 2001
From: Rebecca Turner
Subject: deps: upgrade npm to 2.15.5
@@ -109,5 +109,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-bin.html b/deps/npm/html/doc/api/npm-bin.html
index 92508b71055afa..5e9484bded048b 100644
--- a/deps/npm/html/doc/api/npm-bin.html
+++ b/deps/npm/html/doc/api/npm-bin.html
@@ -28,5 +28,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-bugs.html b/deps/npm/html/doc/api/npm-bugs.html
index ab51478fc67917..ceb7572465e891 100644
--- a/deps/npm/html/doc/api/npm-bugs.html
+++ b/deps/npm/html/doc/api/npm-bugs.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-cache.html b/deps/npm/html/doc/api/npm-cache.html
index 5402f1f70bd79f..c90381500fd2ed 100644
--- a/deps/npm/html/doc/api/npm-cache.html
+++ b/deps/npm/html/doc/api/npm-cache.html
@@ -42,5 +42,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-commands.html b/deps/npm/html/doc/api/npm-commands.html
index e217fb88e7f63c..eb90c0ccb9adc4 100644
--- a/deps/npm/html/doc/api/npm-commands.html
+++ b/deps/npm/html/doc/api/npm-commands.html
@@ -36,5 +36,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-config.html b/deps/npm/html/doc/api/npm-config.html
index 43c24f25e3c07b..fabdef673d59ba 100644
--- a/deps/npm/html/doc/api/npm-config.html
+++ b/deps/npm/html/doc/api/npm-config.html
@@ -57,5 +57,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-deprecate.html b/deps/npm/html/doc/api/npm-deprecate.html
index 70f3d954bd337e..339ec069d0f4f7 100644
--- a/deps/npm/html/doc/api/npm-deprecate.html
+++ b/deps/npm/html/doc/api/npm-deprecate.html
@@ -47,5 +47,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-docs.html b/deps/npm/html/doc/api/npm-docs.html
index 8b987519a3226b..778d393fad29bb 100644
--- a/deps/npm/html/doc/api/npm-docs.html
+++ b/deps/npm/html/doc/api/npm-docs.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-edit.html b/deps/npm/html/doc/api/npm-edit.html
index 643a43f1920eeb..482f569b79955d 100644
--- a/deps/npm/html/doc/api/npm-edit.html
+++ b/deps/npm/html/doc/api/npm-edit.html
@@ -36,5 +36,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-explore.html b/deps/npm/html/doc/api/npm-explore.html
index 4032e47609e989..e1bd478647bce7 100644
--- a/deps/npm/html/doc/api/npm-explore.html
+++ b/deps/npm/html/doc/api/npm-explore.html
@@ -31,5 +31,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-help-search.html b/deps/npm/html/doc/api/npm-help-search.html
index 5872b0b96a8753..85665599b567c6 100644
--- a/deps/npm/html/doc/api/npm-help-search.html
+++ b/deps/npm/html/doc/api/npm-help-search.html
@@ -44,5 +44,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-init.html b/deps/npm/html/doc/api/npm-init.html
index 4c7be3d7320aef..c310c8ca6fbaee 100644
--- a/deps/npm/html/doc/api/npm-init.html
+++ b/deps/npm/html/doc/api/npm-init.html
@@ -39,5 +39,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-install.html b/deps/npm/html/doc/api/npm-install.html
index 4cced5e4991142..de8ee12446aa10 100644
--- a/deps/npm/html/doc/api/npm-install.html
+++ b/deps/npm/html/doc/api/npm-install.html
@@ -32,5 +32,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-link.html b/deps/npm/html/doc/api/npm-link.html
index 9238174344ae1a..54f2cabf9a8b5b 100644
--- a/deps/npm/html/doc/api/npm-link.html
+++ b/deps/npm/html/doc/api/npm-link.html
@@ -42,5 +42,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-load.html b/deps/npm/html/doc/api/npm-load.html
index 1cde9d426b5bee..f64db440acb7a6 100644
--- a/deps/npm/html/doc/api/npm-load.html
+++ b/deps/npm/html/doc/api/npm-load.html
@@ -37,5 +37,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-ls.html b/deps/npm/html/doc/api/npm-ls.html
index 21b609bc039f6e..bfeed4581fba16 100644
--- a/deps/npm/html/doc/api/npm-ls.html
+++ b/deps/npm/html/doc/api/npm-ls.html
@@ -63,5 +63,5 @@ global
-
+
diff --git a/deps/npm/html/doc/api/npm-outdated.html b/deps/npm/html/doc/api/npm-outdated.html
index 220815c0125106..2d5f6aad329703 100644
--- a/deps/npm/html/doc/api/npm-outdated.html
+++ b/deps/npm/html/doc/api/npm-outdated.html
@@ -28,5 +28,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-owner.html b/deps/npm/html/doc/api/npm-owner.html
index 8aa24c3838b74e..36ee9ebba72f7b 100644
--- a/deps/npm/html/doc/api/npm-owner.html
+++ b/deps/npm/html/doc/api/npm-owner.html
@@ -47,5 +47,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-pack.html b/deps/npm/html/doc/api/npm-pack.html
index 6ec85a565e7f2f..288386888da17c 100644
--- a/deps/npm/html/doc/api/npm-pack.html
+++ b/deps/npm/html/doc/api/npm-pack.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-ping.html b/deps/npm/html/doc/api/npm-ping.html
index 4cad6f837c30fa..90eb85a552dfb3 100644
--- a/deps/npm/html/doc/api/npm-ping.html
+++ b/deps/npm/html/doc/api/npm-ping.html
@@ -29,4 +29,4 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-prefix.html b/deps/npm/html/doc/api/npm-prefix.html
index c96bf086e37cae..be18dea7f53cc3 100644
--- a/deps/npm/html/doc/api/npm-prefix.html
+++ b/deps/npm/html/doc/api/npm-prefix.html
@@ -29,5 +29,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-prune.html b/deps/npm/html/doc/api/npm-prune.html
index c2ac7242c53941..3bbe302223aa8d 100644
--- a/deps/npm/html/doc/api/npm-prune.html
+++ b/deps/npm/html/doc/api/npm-prune.html
@@ -30,5 +30,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-publish.html b/deps/npm/html/doc/api/npm-publish.html
index 41f7f13531b193..5c353abc099306 100644
--- a/deps/npm/html/doc/api/npm-publish.html
+++ b/deps/npm/html/doc/api/npm-publish.html
@@ -46,5 +46,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-rebuild.html b/deps/npm/html/doc/api/npm-rebuild.html
index 232e6b11ca9f3b..884c10ec60c1a2 100644
--- a/deps/npm/html/doc/api/npm-rebuild.html
+++ b/deps/npm/html/doc/api/npm-rebuild.html
@@ -30,5 +30,5 @@ CONFIGURATION
-
+
diff --git a/deps/npm/html/doc/api/npm-repo.html b/deps/npm/html/doc/api/npm-repo.html
index 58c0b0aa4d8219..7fcc7626a51d01 100644
--- a/deps/npm/html/doc/api/npm-repo.html
+++ b/deps/npm/html/doc/api/npm-repo.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-restart.html b/deps/npm/html/doc/api/npm-restart.html
index 3fd463db014c05..b07e623adb2ff9 100644
--- a/deps/npm/html/doc/api/npm-restart.html
+++ b/deps/npm/html/doc/api/npm-restart.html
@@ -52,5 +52,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-root.html b/deps/npm/html/doc/api/npm-root.html
index c7c8d1b8c1d534..8fdd3750988465 100644
--- a/deps/npm/html/doc/api/npm-root.html
+++ b/deps/npm/html/doc/api/npm-root.html
@@ -29,5 +29,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-run-script.html b/deps/npm/html/doc/api/npm-run-script.html
index f00acb829a0e89..84e66fad817de1 100644
--- a/deps/npm/html/doc/api/npm-run-script.html
+++ b/deps/npm/html/doc/api/npm-run-script.html
@@ -41,5 +41,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-search.html b/deps/npm/html/doc/api/npm-search.html
index 571e747ba0ddcf..c0bbc25f201693 100644
--- a/deps/npm/html/doc/api/npm-search.html
+++ b/deps/npm/html/doc/api/npm-search.html
@@ -53,5 +53,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-shrinkwrap.html b/deps/npm/html/doc/api/npm-shrinkwrap.html
index 82d7c1bc186f99..dbeaf23889dddd 100644
--- a/deps/npm/html/doc/api/npm-shrinkwrap.html
+++ b/deps/npm/html/doc/api/npm-shrinkwrap.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-start.html b/deps/npm/html/doc/api/npm-start.html
index 474b7babc04d68..7bc6170b299683 100644
--- a/deps/npm/html/doc/api/npm-start.html
+++ b/deps/npm/html/doc/api/npm-start.html
@@ -28,5 +28,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-stop.html b/deps/npm/html/doc/api/npm-stop.html
index f6ba2a13c8258a..68cd39101cc72e 100644
--- a/deps/npm/html/doc/api/npm-stop.html
+++ b/deps/npm/html/doc/api/npm-stop.html
@@ -28,5 +28,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-tag.html b/deps/npm/html/doc/api/npm-tag.html
index 87826bf6177d9e..f6f0e0db6d93fc 100644
--- a/deps/npm/html/doc/api/npm-tag.html
+++ b/deps/npm/html/doc/api/npm-tag.html
@@ -36,5 +36,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-test.html b/deps/npm/html/doc/api/npm-test.html
index a0c785bd2d014c..3aabf3f7e14062 100644
--- a/deps/npm/html/doc/api/npm-test.html
+++ b/deps/npm/html/doc/api/npm-test.html
@@ -30,5 +30,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-uninstall.html b/deps/npm/html/doc/api/npm-uninstall.html
index 2fadafbf0424fd..9cbb4c391eacb4 100644
--- a/deps/npm/html/doc/api/npm-uninstall.html
+++ b/deps/npm/html/doc/api/npm-uninstall.html
@@ -30,5 +30,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-unpublish.html b/deps/npm/html/doc/api/npm-unpublish.html
index 025a484f6b2b88..c1cf291e09483a 100644
--- a/deps/npm/html/doc/api/npm-unpublish.html
+++ b/deps/npm/html/doc/api/npm-unpublish.html
@@ -33,5 +33,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-update.html b/deps/npm/html/doc/api/npm-update.html
index 16c099176b286d..df80fc3a8f9717 100644
--- a/deps/npm/html/doc/api/npm-update.html
+++ b/deps/npm/html/doc/api/npm-update.html
@@ -33,5 +33,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/api/npm-version.html b/deps/npm/html/doc/api/npm-version.html
index fdae63fbfa38d2..b8fda6c4994f8b 100644
--- a/deps/npm/html/doc/api/npm-version.html
+++ b/deps/npm/html/doc/api/npm-version.html
@@ -32,5 +32,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm-view.html b/deps/npm/html/doc/api/npm-view.html
index f2935891695290..f7833d5d166ac1 100644
--- a/deps/npm/html/doc/api/npm-view.html
+++ b/deps/npm/html/doc/api/npm-view.html
@@ -81,5 +81,5 @@ RETURN VALUE
-
+
diff --git a/deps/npm/html/doc/api/npm-whoami.html b/deps/npm/html/doc/api/npm-whoami.html
index 04a53c4c5ba7ec..0dcecdc78908ea 100644
--- a/deps/npm/html/doc/api/npm-whoami.html
+++ b/deps/npm/html/doc/api/npm-whoami.html
@@ -29,5 +29,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/api/npm.html b/deps/npm/html/doc/api/npm.html
index c4f26fea2b5717..7c1a288eabf5de 100644
--- a/deps/npm/html/doc/api/npm.html
+++ b/deps/npm/html/doc/api/npm.html
@@ -23,7 +23,7 @@ SYNOPSIS
npm.commands.install(["package"], cb)
})
VERSION
-2.15.4
+2.15.5
DESCRIPTION
ABBREVS
-
+
diff --git a/deps/npm/html/doc/cli/npm-access.html b/deps/npm/html/doc/cli/npm-access.html
index b46acd99516f2a..5d0547543dd34c 100644
--- a/deps/npm/html/doc/cli/npm-access.html
+++ b/deps/npm/html/doc/cli/npm-access.html
@@ -84,5 +84,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-adduser.html b/deps/npm/html/doc/cli/npm-adduser.html
index 3726c08e4befa7..ea15a6163a3054 100644
--- a/deps/npm/html/doc/cli/npm-adduser.html
+++ b/deps/npm/html/doc/cli/npm-adduser.html
@@ -72,5 +72,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-bin.html b/deps/npm/html/doc/cli/npm-bin.html
index ae284bca40032d..9f44db69551d69 100644
--- a/deps/npm/html/doc/cli/npm-bin.html
+++ b/deps/npm/html/doc/cli/npm-bin.html
@@ -35,5 +35,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-bugs.html b/deps/npm/html/doc/cli/npm-bugs.html
index f0ce3dea8d2cfc..aa2592a3922119 100644
--- a/deps/npm/html/doc/cli/npm-bugs.html
+++ b/deps/npm/html/doc/cli/npm-bugs.html
@@ -56,5 +56,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-build.html b/deps/npm/html/doc/cli/npm-build.html
index c1bebde34511b8..af1255c14168b7 100644
--- a/deps/npm/html/doc/cli/npm-build.html
+++ b/deps/npm/html/doc/cli/npm-build.html
@@ -40,5 +40,5 @@ DESCRIPTION
-
+
diff --git a/deps/npm/html/doc/cli/npm-bundle.html b/deps/npm/html/doc/cli/npm-bundle.html
index 094047a5b19c19..1f50a8b0f4e22a 100644
--- a/deps/npm/html/doc/cli/npm-bundle.html
+++ b/deps/npm/html/doc/cli/npm-bundle.html
@@ -31,5 +31,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-cache.html b/deps/npm/html/doc/cli/npm-cache.html
index d8f26eeab18a9f..165a24f9aec527 100644
--- a/deps/npm/html/doc/cli/npm-cache.html
+++ b/deps/npm/html/doc/cli/npm-cache.html
@@ -81,5 +81,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-completion.html b/deps/npm/html/doc/cli/npm-completion.html
index d21cb02260c3d8..d48a0721082136 100644
--- a/deps/npm/html/doc/cli/npm-completion.html
+++ b/deps/npm/html/doc/cli/npm-completion.html
@@ -42,5 +42,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-config.html b/deps/npm/html/doc/cli/npm-config.html
index daa9fb820aea5f..ba0fc7f5aa27dd 100644
--- a/deps/npm/html/doc/cli/npm-config.html
+++ b/deps/npm/html/doc/cli/npm-config.html
@@ -68,5 +68,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-dedupe.html b/deps/npm/html/doc/cli/npm-dedupe.html
index 0ab750c4578bec..4c5cebfb449a8f 100644
--- a/deps/npm/html/doc/cli/npm-dedupe.html
+++ b/deps/npm/html/doc/cli/npm-dedupe.html
@@ -65,5 +65,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-deprecate.html b/deps/npm/html/doc/cli/npm-deprecate.html
index 937837c96a9ea7..02ae63d41424ab 100644
--- a/deps/npm/html/doc/cli/npm-deprecate.html
+++ b/deps/npm/html/doc/cli/npm-deprecate.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-dist-tag.html b/deps/npm/html/doc/cli/npm-dist-tag.html
index 364665185cac32..dfd245fef1b18e 100644
--- a/deps/npm/html/doc/cli/npm-dist-tag.html
+++ b/deps/npm/html/doc/cli/npm-dist-tag.html
@@ -87,5 +87,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-docs.html b/deps/npm/html/doc/cli/npm-docs.html
index e9631775626679..1059759b2b1e20 100644
--- a/deps/npm/html/doc/cli/npm-docs.html
+++ b/deps/npm/html/doc/cli/npm-docs.html
@@ -56,5 +56,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-edit.html b/deps/npm/html/doc/cli/npm-edit.html
index 420d93b68922dd..3fba9940e0bfa6 100644
--- a/deps/npm/html/doc/cli/npm-edit.html
+++ b/deps/npm/html/doc/cli/npm-edit.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-explore.html b/deps/npm/html/doc/cli/npm-explore.html
index 85f4c56383e499..fef17b26e0ed35 100644
--- a/deps/npm/html/doc/cli/npm-explore.html
+++ b/deps/npm/html/doc/cli/npm-explore.html
@@ -49,5 +49,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-help-search.html b/deps/npm/html/doc/cli/npm-help-search.html
index 7d99b003138bc2..d1bf8924b6ba2e 100644
--- a/deps/npm/html/doc/cli/npm-help-search.html
+++ b/deps/npm/html/doc/cli/npm-help-search.html
@@ -46,5 +46,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-help.html b/deps/npm/html/doc/cli/npm-help.html
index ebb12d699ec2b8..fb288ba13e826d 100644
--- a/deps/npm/html/doc/cli/npm-help.html
+++ b/deps/npm/html/doc/cli/npm-help.html
@@ -52,5 +52,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-init.html b/deps/npm/html/doc/cli/npm-init.html
index 02a46fcbbff03e..6fbad20eca47b6 100644
--- a/deps/npm/html/doc/cli/npm-init.html
+++ b/deps/npm/html/doc/cli/npm-init.html
@@ -48,5 +48,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-install.html b/deps/npm/html/doc/cli/npm-install.html
index a4637c87319e99..20162d1c9f4ba7 100644
--- a/deps/npm/html/doc/cli/npm-install.html
+++ b/deps/npm/html/doc/cli/npm-install.html
@@ -281,5 +281,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-link.html b/deps/npm/html/doc/cli/npm-link.html
index 5f22e1d2719d65..a85e3939b8baf8 100644
--- a/deps/npm/html/doc/cli/npm-link.html
+++ b/deps/npm/html/doc/cli/npm-link.html
@@ -73,5 +73,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/cli/npm-logout.html b/deps/npm/html/doc/cli/npm-logout.html
index d4bf22526f2b3d..1601bae0fd15b3 100644
--- a/deps/npm/html/doc/cli/npm-logout.html
+++ b/deps/npm/html/doc/cli/npm-logout.html
@@ -51,5 +51,5 @@ scope
-
+
diff --git a/deps/npm/html/doc/cli/npm-ls.html b/deps/npm/html/doc/cli/npm-ls.html
index e97a2d28326ce6..1f2a04c917415d 100644
--- a/deps/npm/html/doc/cli/npm-ls.html
+++ b/deps/npm/html/doc/cli/npm-ls.html
@@ -22,7 +22,7 @@ SYNOPSIS
limit the results to only the paths to the packages named. Note that
nested packages will also show the paths to the specified packages.
For example, running npm ls promzard
in npm's source tree will show:
-npm@2.15.4 /path/to/npm
+npm@2.15.5 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
It will print out extraneous, missing, and invalid packages.
@@ -97,5 +97,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-outdated.html b/deps/npm/html/doc/cli/npm-outdated.html
index 1dc705eda9c1d7..bc1fa06883878e 100644
--- a/deps/npm/html/doc/cli/npm-outdated.html
+++ b/deps/npm/html/doc/cli/npm-outdated.html
@@ -116,5 +116,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-owner.html b/deps/npm/html/doc/cli/npm-owner.html
index aa6da5860bfdd3..1aa44ebdd50a0f 100644
--- a/deps/npm/html/doc/cli/npm-owner.html
+++ b/deps/npm/html/doc/cli/npm-owner.html
@@ -51,5 +51,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-pack.html b/deps/npm/html/doc/cli/npm-pack.html
index 98045fc2c218a2..6d83ba1de7ede6 100644
--- a/deps/npm/html/doc/cli/npm-pack.html
+++ b/deps/npm/html/doc/cli/npm-pack.html
@@ -41,5 +41,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-ping.html b/deps/npm/html/doc/cli/npm-ping.html
index aa0e2ac014a936..9bf1dcbd5e91b4 100644
--- a/deps/npm/html/doc/cli/npm-ping.html
+++ b/deps/npm/html/doc/cli/npm-ping.html
@@ -32,4 +32,4 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-prefix.html b/deps/npm/html/doc/cli/npm-prefix.html
index 2509a789431349..614d420d4e5e4f 100644
--- a/deps/npm/html/doc/cli/npm-prefix.html
+++ b/deps/npm/html/doc/cli/npm-prefix.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-prune.html b/deps/npm/html/doc/cli/npm-prune.html
index cabc4d151ee371..72afc505a3dc7e 100644
--- a/deps/npm/html/doc/cli/npm-prune.html
+++ b/deps/npm/html/doc/cli/npm-prune.html
@@ -41,5 +41,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-publish.html b/deps/npm/html/doc/cli/npm-publish.html
index 2ec073a1e18df8..6f01fa806d56dd 100644
--- a/deps/npm/html/doc/cli/npm-publish.html
+++ b/deps/npm/html/doc/cli/npm-publish.html
@@ -69,5 +69,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-rebuild.html b/deps/npm/html/doc/cli/npm-rebuild.html
index 70a0dac7b116d8..17c5272080e9d3 100644
--- a/deps/npm/html/doc/cli/npm-rebuild.html
+++ b/deps/npm/html/doc/cli/npm-rebuild.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-repo.html b/deps/npm/html/doc/cli/npm-repo.html
index b38d4612cc71b2..ff48563e4a1d94 100644
--- a/deps/npm/html/doc/cli/npm-repo.html
+++ b/deps/npm/html/doc/cli/npm-repo.html
@@ -42,5 +42,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-restart.html b/deps/npm/html/doc/cli/npm-restart.html
index 137f9c97bf54b2..e74e528c6376c2 100644
--- a/deps/npm/html/doc/cli/npm-restart.html
+++ b/deps/npm/html/doc/cli/npm-restart.html
@@ -53,5 +53,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-rm.html b/deps/npm/html/doc/cli/npm-rm.html
index b1adf083af6642..a7079518a5551b 100644
--- a/deps/npm/html/doc/cli/npm-rm.html
+++ b/deps/npm/html/doc/cli/npm-rm.html
@@ -39,5 +39,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-root.html b/deps/npm/html/doc/cli/npm-root.html
index b97b2647cbff37..ece73937748763 100644
--- a/deps/npm/html/doc/cli/npm-root.html
+++ b/deps/npm/html/doc/cli/npm-root.html
@@ -35,5 +35,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-run-script.html b/deps/npm/html/doc/cli/npm-run-script.html
index a88ed5fafc9d60..c8c733e304869f 100644
--- a/deps/npm/html/doc/cli/npm-run-script.html
+++ b/deps/npm/html/doc/cli/npm-run-script.html
@@ -57,5 +57,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-search.html b/deps/npm/html/doc/cli/npm-search.html
index 179cc4dc55a5b0..faa3dfebf2cc04 100644
--- a/deps/npm/html/doc/cli/npm-search.html
+++ b/deps/npm/html/doc/cli/npm-search.html
@@ -57,5 +57,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-shrinkwrap.html b/deps/npm/html/doc/cli/npm-shrinkwrap.html
index cf84b213ac509a..a26f1c45bc82f1 100644
--- a/deps/npm/html/doc/cli/npm-shrinkwrap.html
+++ b/deps/npm/html/doc/cli/npm-shrinkwrap.html
@@ -168,5 +168,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-star.html b/deps/npm/html/doc/cli/npm-star.html
index 1dbf18645eec2b..4ef8bf2873bd66 100644
--- a/deps/npm/html/doc/cli/npm-star.html
+++ b/deps/npm/html/doc/cli/npm-star.html
@@ -36,5 +36,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-stars.html b/deps/npm/html/doc/cli/npm-stars.html
index c448460952d9e3..ba1a995dc8fd99 100644
--- a/deps/npm/html/doc/cli/npm-stars.html
+++ b/deps/npm/html/doc/cli/npm-stars.html
@@ -37,5 +37,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-start.html b/deps/npm/html/doc/cli/npm-start.html
index 2758387781b5fe..1edd091dff1043 100644
--- a/deps/npm/html/doc/cli/npm-start.html
+++ b/deps/npm/html/doc/cli/npm-start.html
@@ -39,5 +39,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-stop.html b/deps/npm/html/doc/cli/npm-stop.html
index 9de0a12540cf25..503e36cd578f6a 100644
--- a/deps/npm/html/doc/cli/npm-stop.html
+++ b/deps/npm/html/doc/cli/npm-stop.html
@@ -34,5 +34,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-tag.html b/deps/npm/html/doc/cli/npm-tag.html
index 03d214deaab17b..573353ab797763 100644
--- a/deps/npm/html/doc/cli/npm-tag.html
+++ b/deps/npm/html/doc/cli/npm-tag.html
@@ -62,5 +62,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-team.html b/deps/npm/html/doc/cli/npm-team.html
index 47efc6020cc258..6b5e185e7346b4 100644
--- a/deps/npm/html/doc/cli/npm-team.html
+++ b/deps/npm/html/doc/cli/npm-team.html
@@ -67,4 +67,4 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-test.html b/deps/npm/html/doc/cli/npm-test.html
index 4d44dc96ec0308..f05d2b309b95a7 100644
--- a/deps/npm/html/doc/cli/npm-test.html
+++ b/deps/npm/html/doc/cli/npm-test.html
@@ -38,5 +38,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-uninstall.html b/deps/npm/html/doc/cli/npm-uninstall.html
index ae963c7ab4fc83..dd919844052161 100644
--- a/deps/npm/html/doc/cli/npm-uninstall.html
+++ b/deps/npm/html/doc/cli/npm-uninstall.html
@@ -57,5 +57,5 @@ SYNOPSIS
-
+
diff --git a/deps/npm/html/doc/cli/npm-unpublish.html b/deps/npm/html/doc/cli/npm-unpublish.html
index 8d802c3f3fc934..545729a5e8e1bd 100644
--- a/deps/npm/html/doc/cli/npm-unpublish.html
+++ b/deps/npm/html/doc/cli/npm-unpublish.html
@@ -47,5 +47,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-update.html b/deps/npm/html/doc/cli/npm-update.html
index 0e403137c31f75..6936fd0595c5de 100644
--- a/deps/npm/html/doc/cli/npm-update.html
+++ b/deps/npm/html/doc/cli/npm-update.html
@@ -117,5 +117,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-version.html b/deps/npm/html/doc/cli/npm-version.html
index d2ac87533e2b74..01220efbe84d69 100644
--- a/deps/npm/html/doc/cli/npm-version.html
+++ b/deps/npm/html/doc/cli/npm-version.html
@@ -95,5 +95,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-view.html b/deps/npm/html/doc/cli/npm-view.html
index cf62395eed2ab3..afbdb8d1373804 100644
--- a/deps/npm/html/doc/cli/npm-view.html
+++ b/deps/npm/html/doc/cli/npm-view.html
@@ -85,5 +85,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm-whoami.html b/deps/npm/html/doc/cli/npm-whoami.html
index 587bed952cb93b..20d9261b22859b 100644
--- a/deps/npm/html/doc/cli/npm-whoami.html
+++ b/deps/npm/html/doc/cli/npm-whoami.html
@@ -33,5 +33,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/cli/npm.html b/deps/npm/html/doc/cli/npm.html
index 23ff98a241a421..ac99ada06eacb9 100644
--- a/deps/npm/html/doc/cli/npm.html
+++ b/deps/npm/html/doc/cli/npm.html
@@ -13,7 +13,7 @@ npm
javascript package manager
SYNOPSIS
npm <command> [args]
VERSION
-2.15.4
+2.15.5
DESCRIPTION
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
@@ -126,7 +126,7 @@
AUTHOR
Isaac Z. Schlueter ::
isaacs ::
@izs ::
-i@izs.me
+i@izs.me
SEE ALSO
- npm-help(1)
@@ -152,5 +152,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-folders.html b/deps/npm/html/doc/files/npm-folders.html
index ce353cd5e38d00..7682df15417643 100644
--- a/deps/npm/html/doc/files/npm-folders.html
+++ b/deps/npm/html/doc/files/npm-folders.html
@@ -183,5 +183,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-global.html b/deps/npm/html/doc/files/npm-global.html
index ce353cd5e38d00..7682df15417643 100644
--- a/deps/npm/html/doc/files/npm-global.html
+++ b/deps/npm/html/doc/files/npm-global.html
@@ -183,5 +183,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npm-json.html b/deps/npm/html/doc/files/npm-json.html
index 57ae75e75d753c..c11cbc5357f680 100644
--- a/deps/npm/html/doc/files/npm-json.html
+++ b/deps/npm/html/doc/files/npm-json.html
@@ -582,5 +582,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/npmrc.html b/deps/npm/html/doc/files/npmrc.html
index 9be16677c185e9..05c3f572a8d146 100644
--- a/deps/npm/html/doc/files/npmrc.html
+++ b/deps/npm/html/doc/files/npmrc.html
@@ -83,5 +83,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/files/package.json.html b/deps/npm/html/doc/files/package.json.html
index 57ae75e75d753c..c11cbc5357f680 100644
--- a/deps/npm/html/doc/files/package.json.html
+++ b/deps/npm/html/doc/files/package.json.html
@@ -582,5 +582,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/index.html b/deps/npm/html/doc/index.html
index 86784765477650..876103b0162d85 100644
--- a/deps/npm/html/doc/index.html
+++ b/deps/npm/html/doc/index.html
@@ -242,5 +242,5 @@ semver(7)
-
+
diff --git a/deps/npm/html/doc/misc/npm-coding-style.html b/deps/npm/html/doc/misc/npm-coding-style.html
index 64d15d7500eb19..fb10e35d7a4b62 100644
--- a/deps/npm/html/doc/misc/npm-coding-style.html
+++ b/deps/npm/html/doc/misc/npm-coding-style.html
@@ -147,5 +147,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-config.html b/deps/npm/html/doc/misc/npm-config.html
index 901ce6dcc38230..8adaff16063476 100644
--- a/deps/npm/html/doc/misc/npm-config.html
+++ b/deps/npm/html/doc/misc/npm-config.html
@@ -812,5 +812,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-developers.html b/deps/npm/html/doc/misc/npm-developers.html
index 01b9f8f2323848..82702688a6b102 100644
--- a/deps/npm/html/doc/misc/npm-developers.html
+++ b/deps/npm/html/doc/misc/npm-developers.html
@@ -195,5 +195,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-disputes.html b/deps/npm/html/doc/misc/npm-disputes.html
index def055a4bf6f2a..8d056c3c5384a5 100644
--- a/deps/npm/html/doc/misc/npm-disputes.html
+++ b/deps/npm/html/doc/misc/npm-disputes.html
@@ -13,7 +13,7 @@ npm-disputes
Handling Module Name Disputes
SYNOPSIS
- Get the author email with
npm owner ls <pkgname>
-- Email the author, CC support@npmjs.com
+- Email the author, CC support@npmjs.com
- After a few weeks, if there's no resolution, we'll sort it out.
Don't squat on package names. Publish code or move out of the way.
@@ -51,12 +51,12 @@ DESCRIPTION
owner (Bob).
- Joe emails Bob, explaining the situation as respectfully as
possible, and what he would like to do with the module name. He
-adds the npm support staff support@npmjs.com to the CC list of
+adds the npm support staff support@npmjs.com to the CC list of
the email. Mention in the email that Bob can run npm owner add joe foo
to add Joe as an owner of the foo package.
- After a reasonable amount of time, if Bob has not responded, or if
Bob and Joe can't come to any sort of resolution, email support
-support@npmjs.com and we'll sort it out. ("Reasonable" is
+support@npmjs.com and we'll sort it out. ("Reasonable" is
usually at least 4 weeks, but extra time is allowed around common
holidays.)
@@ -112,5 +112,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-orgs.html b/deps/npm/html/doc/misc/npm-orgs.html
index 22a3d732c7642e..b52c3541801af0 100644
--- a/deps/npm/html/doc/misc/npm-orgs.html
+++ b/deps/npm/html/doc/misc/npm-orgs.html
@@ -86,4 +86,4 @@ Team Admins create teams
-
+
diff --git a/deps/npm/html/doc/misc/npm-registry.html b/deps/npm/html/doc/misc/npm-registry.html
index d220d9dec49d6a..36aa364aa32f53 100644
--- a/deps/npm/html/doc/misc/npm-registry.html
+++ b/deps/npm/html/doc/misc/npm-registry.html
@@ -70,5 +70,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-scope.html b/deps/npm/html/doc/misc/npm-scope.html
index 62dc4fb42b71c8..1c08416786b552 100644
--- a/deps/npm/html/doc/misc/npm-scope.html
+++ b/deps/npm/html/doc/misc/npm-scope.html
@@ -92,5 +92,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/npm-scripts.html b/deps/npm/html/doc/misc/npm-scripts.html
index 19d7dfa49f3938..e15458dc3187a6 100644
--- a/deps/npm/html/doc/misc/npm-scripts.html
+++ b/deps/npm/html/doc/misc/npm-scripts.html
@@ -207,5 +207,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/removing-npm.html b/deps/npm/html/doc/misc/removing-npm.html
index 4532ad06c4aaea..41bbbd4e3207da 100644
--- a/deps/npm/html/doc/misc/removing-npm.html
+++ b/deps/npm/html/doc/misc/removing-npm.html
@@ -57,5 +57,5 @@ SEE ALSO
-
+
diff --git a/deps/npm/html/doc/misc/semver.html b/deps/npm/html/doc/misc/semver.html
index 3116e42aaf1ba2..9cb0f7e6fd4409 100644
--- a/deps/npm/html/doc/misc/semver.html
+++ b/deps/npm/html/doc/misc/semver.html
@@ -302,5 +302,5 @@ Ranges
-
+
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index 89503697d7e5a3..a1e11b17e62bdd 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -23,7 +23,7 @@ For example, running \fBnpm ls promzard\fP in npm's source tree will show:
.P
.RS 2
.nf
-npm@2.15.4 /path/to/npm
+npm@2.15.5 /path/to/npm
└─┬ init\-package\-json@0\.0\.4
└── promzard@0\.1\.5
.fi
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 0a128ecb8863ab..a466ba281fea6d 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -10,7 +10,7 @@ npm [args]
.RE
.SH VERSION
.P
-2.15.4
+2.15.5
.SH DESCRIPTION
.P
npm is the package manager for the Node JavaScript platform\. It puts
diff --git a/deps/npm/man/man3/npm.3 b/deps/npm/man/man3/npm.3
index 235a66cb9a13d5..4c691a49f7ab10 100644
--- a/deps/npm/man/man3/npm.3
+++ b/deps/npm/man/man3/npm.3
@@ -20,7 +20,7 @@ npm\.load([configObject, ]function (er, npm) {
.RE
.SH VERSION
.P
-2.15.4
+2.15.5
.SH DESCRIPTION
.P
This is the API documentation for npm\.
diff --git a/deps/npm/node_modules/block-stream/bench/block-stream-pause.js b/deps/npm/node_modules/block-stream/bench/block-stream-pause.js
deleted file mode 100644
index 9328844aa69059..00000000000000
--- a/deps/npm/node_modules/block-stream/bench/block-stream-pause.js
+++ /dev/null
@@ -1,70 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- f.pause()
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- f.resume()
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/bench/block-stream.js b/deps/npm/node_modules/block-stream/bench/block-stream.js
deleted file mode 100644
index 1141f3a84c2b31..00000000000000
--- a/deps/npm/node_modules/block-stream/bench/block-stream.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/bench/dropper-pause.js b/deps/npm/node_modules/block-stream/bench/dropper-pause.js
deleted file mode 100644
index 93e4068eea0eda..00000000000000
--- a/deps/npm/node_modules/block-stream/bench/dropper-pause.js
+++ /dev/null
@@ -1,70 +0,0 @@
-var BlockStream = require("dropper")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- f.pause()
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- f.resume()
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/bench/dropper.js b/deps/npm/node_modules/block-stream/bench/dropper.js
deleted file mode 100644
index 55fa13305456e7..00000000000000
--- a/deps/npm/node_modules/block-stream/bench/dropper.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("dropper")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/package.json b/deps/npm/node_modules/block-stream/package.json
index 80227bb69ed8ab..890c67253c5f84 100644
--- a/deps/npm/node_modules/block-stream/package.json
+++ b/deps/npm/node_modules/block-stream/package.json
@@ -6,7 +6,7 @@
},
"name": "block-stream",
"description": "a stream of blocks",
- "version": "0.0.8",
+ "version": "0.0.9",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/block-stream.git"
@@ -19,29 +19,32 @@
"inherits": "~2.0.0"
},
"devDependencies": {
- "tap": "0.x"
+ "tap": "^5.7.1"
},
"scripts": {
- "test": "tap test/"
+ "test": "tap test/*.js --cov"
},
"license": "ISC",
- "gitHead": "b35520314f4763af0788d65a846bb43d9c0a8f02",
+ "files": [
+ "block-stream.js"
+ ],
+ "gitHead": "321cf242ef6d130bb2e59c0565a61ded5dd2673f",
"bugs": {
"url": "https://github.com/isaacs/block-stream/issues"
},
"homepage": "https://github.com/isaacs/block-stream#readme",
- "_id": "block-stream@0.0.8",
- "_shasum": "0688f46da2bbf9cff0c4f68225a0cb95cbe8a46b",
- "_from": "block-stream@0.0.8",
- "_npmVersion": "2.10.0",
- "_nodeVersion": "2.0.1",
+ "_id": "block-stream@0.0.9",
+ "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
+ "_from": "block-stream@0.0.9",
+ "_npmVersion": "3.8.5",
+ "_nodeVersion": "5.6.0",
"_npmUser": {
"name": "isaacs",
- "email": "isaacs@npmjs.com"
+ "email": "i@izs.me"
},
"dist": {
- "shasum": "0688f46da2bbf9cff0c4f68225a0cb95cbe8a46b",
- "tarball": "http://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz"
+ "shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
+ "tarball": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz"
},
"maintainers": [
{
@@ -49,6 +52,10 @@
"email": "i@izs.me"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/block-stream-0.0.9.tgz_1462149852620_0.6890447810292244"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz"
+ "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz"
}
diff --git a/deps/npm/node_modules/block-stream/test/basic.js b/deps/npm/node_modules/block-stream/test/basic.js
deleted file mode 100644
index b4b930511e10b2..00000000000000
--- a/deps/npm/node_modules/block-stream/test/basic.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var tap = require("tap")
- , BlockStream = require("../block-stream.js")
-
-tap.test("basic test", function (t) {
- var b = new BlockStream(16)
- var fs = require("fs")
- var fstr = fs.createReadStream(__filename, {encoding: "utf8"})
- fstr.pipe(b)
-
- var stat
- t.doesNotThrow(function () {
- stat = fs.statSync(__filename)
- }, "stat should not throw")
-
- var totalBytes = 0
- b.on("data", function (c) {
- t.equal(c.length, 16, "chunks should be 16 bytes long")
- t.type(c, Buffer, "chunks should be buffer objects")
- totalBytes += c.length
- })
- b.on("end", function () {
- var expectedBytes = stat.size + (16 - stat.size % 16)
- t.equal(totalBytes, expectedBytes, "Should be multiple of 16")
- t.end()
- })
-
-})
diff --git a/deps/npm/node_modules/block-stream/test/nopad-thorough.js b/deps/npm/node_modules/block-stream/test/nopad-thorough.js
deleted file mode 100644
index 7a8de88b5b6dca..00000000000000
--- a/deps/npm/node_modules/block-stream/test/nopad-thorough.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]//, 25]//, 1024]
- , writeSizes = [4, 15, 16, 17, 64 ]//, 64, 100]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/test/nopad.js b/deps/npm/node_modules/block-stream/test/nopad.js
deleted file mode 100644
index 6d38429fbc7c37..00000000000000
--- a/deps/npm/node_modules/block-stream/test/nopad.js
+++ /dev/null
@@ -1,57 +0,0 @@
-var BlockStream = require("../")
-var tap = require("tap")
-
-
-tap.test("don't pad, small writes", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(1)
-
- f.on("data", function (c) {
- t.equal(c.toString(), "abc", "should get 'abc'")
- })
-
- f.on("end", function () { t.end() })
-
- f.write(new Buffer("a"))
- f.write(new Buffer("b"))
- f.write(new Buffer("c"))
- f.end()
-})
-
-tap.test("don't pad, exact write", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(1)
-
- var first = true
- f.on("data", function (c) {
- if (first) {
- first = false
- t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
- } else {
- t.fail("should only get one")
- }
- })
-
- f.on("end", function () { t.end() })
-
- f.end(new Buffer("abcdefghijklmnop"))
-})
-
-tap.test("don't pad, big write", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(2)
-
- var first = true
- f.on("data", function (c) {
- if (first) {
- first = false
- t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
- } else {
- t.equal(c.toString(), "q")
- }
- })
-
- f.on("end", function () { t.end() })
-
- f.end(new Buffer("abcdefghijklmnopq"))
-})
diff --git a/deps/npm/node_modules/block-stream/test/pause-resume.js b/deps/npm/node_modules/block-stream/test/pause-resume.js
deleted file mode 100644
index 64d0d091daca04..00000000000000
--- a/deps/npm/node_modules/block-stream/test/pause-resume.js
+++ /dev/null
@@ -1,73 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]
- , writeSizes = [15, 16, 17]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize)
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
- var paused = false
-
- f.on("data", function (c) {
- timeouts ++
- t.notOk(paused, "should not be paused when emitting data")
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- paused = true
- f.pause()
- process.nextTick(function () {
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- paused = false
- f.resume()
- timeouts --
- })
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = expectChunks * blockSize
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 200)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/test/thorough.js b/deps/npm/node_modules/block-stream/test/thorough.js
deleted file mode 100644
index 1cc9ea08a36770..00000000000000
--- a/deps/npm/node_modules/block-stream/test/thorough.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]//, 25]//, 1024]
- , writeSizes = [4, 15, 16, 17, 64 ]//, 64, 100]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize)
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = expectChunks * blockSize
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/block-stream/test/two-stream.js b/deps/npm/node_modules/block-stream/test/two-stream.js
deleted file mode 100644
index c6db79a43d91d7..00000000000000
--- a/deps/npm/node_modules/block-stream/test/two-stream.js
+++ /dev/null
@@ -1,59 +0,0 @@
-var log = console.log,
- assert = require( 'assert' ),
- BlockStream = require("../block-stream.js"),
- isize = 0, tsize = 0, fsize = 0, psize = 0, i = 0,
- filter = null, paper = null, stack = null,
-
-// a source data buffer
-tsize = 1 * 1024; // <- 1K
-stack = new Buffer( tsize );
-for ( ; i < tsize; i++) stack[i] = "x".charCodeAt(0);
-
-isize = 1 * 1024; // <- initial packet size with 4K no bug!
-fsize = 2 * 1024 ; // <- first block-stream size
-psize = Math.ceil( isize / 6 ); // <- second block-stream size
-
-fexpected = Math.ceil( tsize / fsize ); // <- packets expected for first
-pexpected = Math.ceil( tsize / psize ); // <- packets expected for second
-
-
-filter = new BlockStream( fsize, { nopad : true } );
-paper = new BlockStream( psize, { nopad : true } );
-
-
-var fcounter = 0;
-filter.on( 'data', function (c) {
- // verify that they're not null-padded
- for (var i = 0; i < c.length; i ++) {
- assert.strictEqual(c[i], "x".charCodeAt(0))
- }
- ++fcounter;
-} );
-
-var pcounter = 0;
-paper.on( 'data', function (c) {
- // verify that they're not null-padded
- for (var i = 0; i < c.length; i ++) {
- assert.strictEqual(c[i], "x".charCodeAt(0))
- }
- ++pcounter;
-} );
-
-filter.pipe( paper );
-
-filter.on( 'end', function () {
- log("fcounter: %s === %s", fcounter, fexpected)
- assert.strictEqual( fcounter, fexpected );
-} );
-
-paper.on( 'end', function () {
- log("pcounter: %s === %s", pcounter, pexpected);
- assert.strictEqual( pcounter, pexpected );
-} );
-
-
-for ( i = 0, j = isize; j <= tsize; j += isize ) {
- filter.write( stack.slice( j - isize, j ) );
-}
-
-filter.end();
diff --git a/deps/npm/node_modules/graceful-fs/README.md b/deps/npm/node_modules/graceful-fs/README.md
index d920aaac9e17af..d0dcd492549537 100644
--- a/deps/npm/node_modules/graceful-fs/README.md
+++ b/deps/npm/node_modules/graceful-fs/README.md
@@ -9,8 +9,6 @@ resilient to errors.
## Improvements over [fs module](http://api.nodejs.org/fs.html)
-graceful-fs:
-
* Queues up `open` and `readdir` calls, and retries them once
something closes if there is an EMFILE error from too many file
descriptors.
@@ -51,3 +49,85 @@ This should only ever be done at the top-level application layer, in
order to delay on EMFILE errors from any fs-using dependencies. You
should **not** do this in a library, because it can cause unexpected
delays in other parts of the program.
+
+## Changes
+
+This module is fairly stable at this point, and used by a lot of
+things. That being said, because it implements a subtle behavior
+change in a core part of the node API, even modest changes can be
+extremely breaking, and the versioning is thus biased towards
+bumping the major when in doubt.
+
+The main change between major versions has been switching between
+providing a fully-patched `fs` module vs monkey-patching the node core
+builtin, and the approach by which a non-monkey-patched `fs` was
+created.
+
+The goal is to trade `EMFILE` errors for slower fs operations. So, if
+you try to open a zillion files, rather than crashing, `open`
+operations will be queued up and wait for something else to `close`.
+
+There are advantages to each approach. Monkey-patching the fs means
+that no `EMFILE` errors can possibly occur anywhere in your
+application, because everything is using the same core `fs` module,
+which is patched. However, it can also obviously cause undesirable
+side-effects, especially if the module is loaded multiple times.
+
+Implementing a separate-but-identical patched `fs` module is more
+surgical (and doesn't run the risk of patching multiple times), but
+also imposes the challenge of keeping in sync with the core module.
+
+The current approach loads the `fs` module, and then creates a
+lookalike object that has all the same methods, except a few that are
+patched. It is safe to use in all versions of Node from 0.8 through
+7.0.
+
+### v4
+
+* Do not monkey-patch the fs module. This module may now be used as a
+ drop-in dep, and users can opt into monkey-patching the fs builtin
+ if their app requires it.
+
+### v3
+
+* Monkey-patch fs, because the eval approach no longer works on recent
+ node.
+* fixed possible type-error throw if rename fails on windows
+* verify that we *never* get EMFILE errors
+* Ignore ENOSYS from chmod/chown
+* clarify that graceful-fs must be used as a drop-in
+
+### v2.1.0
+
+* Use eval rather than monkey-patching fs.
+* readdir: Always sort the results
+* win32: requeue a file if error has an OK status
+
+### v2.0
+
+* A return to monkey patching
+* wrap process.cwd
+
+### v1.1
+
+* wrap readFile
+* Wrap fs.writeFile.
+* readdir protection
+* Don't clobber the fs builtin
+* Handle fs.read EAGAIN errors by trying again
+* Expose the curOpen counter
+* No-op lchown/lchmod if not implemented
+* fs.rename patch only for win32
+* Patch fs.rename to handle AV software on Windows
+* Close #4 Chown should not fail on einval or eperm if non-root
+* Fix isaacs/fstream#1 Only wrap fs one time
+* Fix #3 Start at 1024 max files, then back off on EMFILE
+* lutimes that doesn't blow up on Linux
+* A full on-rewrite using a queue instead of just swallowing the EMFILE error
+* Wrap Read/Write streams as well
+
+### 1.0
+
+* Update engines for node 0.6
+* Be lstat-graceful on Windows
+* first
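
For context on the two usage modes the README text above describes, here is a minimal sketch (illustrative only, not part of the patch) of graceful-fs as a drop-in replacement, plus the top-level opt-in monkey-patch via gracefulify():

```javascript
// Inside a library: require graceful-fs instead of fs, so this module's
// open/readdir calls are queued and retried instead of failing with EMFILE.
var fs = require('graceful-fs')

fs.readFile('package.json', 'utf8', function (er, data) {
  if (er) throw er
  console.log(JSON.parse(data).name)
})

// At the top level of an application only: opt in to patching the core fs
// builtin so every module sharing it gets the same EMFILE queueing behavior.
var realFs = require('fs')
require('graceful-fs').gracefulify(realFs)
```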
diff --git a/deps/npm/node_modules/graceful-fs/package.json b/deps/npm/node_modules/graceful-fs/package.json
index d519c205d901d5..4f195d647cded0 100644
--- a/deps/npm/node_modules/graceful-fs/package.json
+++ b/deps/npm/node_modules/graceful-fs/package.json
@@ -1,7 +1,7 @@
{
"name": "graceful-fs",
"description": "A drop-in replacement for fs, making various improvements.",
- "version": "4.1.3",
+ "version": "4.1.4",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/node-graceful-fs.git"
@@ -44,23 +44,23 @@
"legacy-streams.js",
"polyfills.js"
],
- "gitHead": "694c56f3aed4aee62d6df169be123d3984f61b85",
+ "gitHead": "fe8f05ccc2779d1dfa6db6173f3ed64f1e9aa72c",
"bugs": {
"url": "https://github.com/isaacs/node-graceful-fs/issues"
},
"homepage": "https://github.com/isaacs/node-graceful-fs#readme",
- "_id": "graceful-fs@4.1.3",
- "_shasum": "92033ce11113c41e2628d61fdfa40bc10dc0155c",
- "_from": "graceful-fs@latest",
- "_npmVersion": "3.7.0",
- "_nodeVersion": "4.0.0",
+ "_id": "graceful-fs@4.1.4",
+ "_shasum": "ef089d2880f033b011823ce5c8fae798da775dbd",
+ "_from": "graceful-fs@4.1.4",
+ "_npmVersion": "3.8.9",
+ "_nodeVersion": "5.6.0",
"_npmUser": {
"name": "isaacs",
"email": "i@izs.me"
},
"dist": {
- "shasum": "92033ce11113c41e2628d61fdfa40bc10dc0155c",
- "tarball": "http://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.3.tgz"
+ "shasum": "ef089d2880f033b011823ce5c8fae798da775dbd",
+ "tarball": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.4.tgz"
},
"maintainers": [
{
@@ -69,8 +69,9 @@
}
],
"_npmOperationalInternal": {
- "host": "packages-6-west.internal.npmjs.com",
- "tmp": "tmp/graceful-fs-4.1.3.tgz_1454449326495_0.943017533281818"
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/graceful-fs-4.1.4.tgz_1462474854900_0.9423982477746904"
},
- "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.3.tgz"
+ "_resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.4.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/lru-cache/.npmignore b/deps/npm/node_modules/lru-cache/.npmignore
deleted file mode 100644
index 07e6e472cc75fa..00000000000000
--- a/deps/npm/node_modules/lru-cache/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-/node_modules
diff --git a/deps/npm/node_modules/lru-cache/.travis.yml b/deps/npm/node_modules/lru-cache/.travis.yml
deleted file mode 100644
index 4af02b3d17e64c..00000000000000
--- a/deps/npm/node_modules/lru-cache/.travis.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-language: node_js
-node_js:
- - '0.8'
- - '0.10'
- - '0.12'
- - 'iojs'
-before_install:
- - npm install -g npm@latest
diff --git a/deps/npm/node_modules/lru-cache/CONTRIBUTORS b/deps/npm/node_modules/lru-cache/CONTRIBUTORS
deleted file mode 100644
index 4a0bc5033a06e7..00000000000000
--- a/deps/npm/node_modules/lru-cache/CONTRIBUTORS
+++ /dev/null
@@ -1,14 +0,0 @@
-# Authors, sorted by whether or not they are me
-Isaac Z. Schlueter
-Brian Cottingham
-Carlos Brito Lage
-Jesse Dailey
-Kevin O'Hara
-Marco Rogers
-Mark Cavage
-Marko Mikulicic
-Nathan Rajlich
-Satheesh Natesan
-Trent Mick
-ashleybrener
-n4kz
diff --git a/deps/npm/node_modules/lru-cache/README.md b/deps/npm/node_modules/lru-cache/README.md
index f804699809b9b9..1bc7b03f3ea731 100644
--- a/deps/npm/node_modules/lru-cache/README.md
+++ b/deps/npm/node_modules/lru-cache/README.md
@@ -2,6 +2,8 @@
A cache object that deletes the least-recently-used items.
+[Build Status](https://travis-ci.org/isaacs/node-lru-cache) [Coverage Status](https://coveralls.io/github/isaacs/node-lru-cache)
+
## Usage:
```javascript
@@ -65,8 +67,12 @@ away.
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
- They do what you think. `max` is optional and overrides the
- cache `max` option if provided.
+ They do what you think. `maxAge` is optional and overrides the
+ cache `maxAge` option if provided.
+
+ If the key is not found, `get()` will return `undefined`.
+
+ The key and val can be any value.
* `peek(key)`
@@ -110,7 +116,7 @@ away.
Return an array of the values in the cache.
-* `length()`
+* `length`
Return total length of objects in cache taking into account
`length` options function.
@@ -130,3 +136,7 @@ away.
Loads another cache entries array, obtained with `sourceCache.dump()`,
into the cache. The destination cache is reset before loading new entries
+
+* `prune()`
+
+  Manually iterates over the entire cache, proactively pruning old entries; see the example below.
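+
+For example, a brief usage sketch of the per-entry `maxAge` override and
+`prune()` described above:
+
+```javascript
+var LRU = require('lru-cache')
+var cache = LRU({ max: 100, maxAge: 1000 * 60 })
+
+cache.set('a', 1)        // expires after the cache-wide maxAge (1 minute)
+cache.set('b', 2, 5000)  // per-entry maxAge overrides the option
+cache.get('missing')     // => undefined
+
+// Stale entries are normally dropped lazily when you get() them;
+// prune() walks the whole cache and evicts them proactively.
+cache.prune()
+```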
diff --git a/deps/npm/node_modules/lru-cache/lib/lru-cache.js b/deps/npm/node_modules/lru-cache/lib/lru-cache.js
index dccc6d59d27e15..e98ef78a53dc79 100644
--- a/deps/npm/node_modules/lru-cache/lib/lru-cache.js
+++ b/deps/npm/node_modules/lru-cache/lib/lru-cache.js
@@ -3,104 +3,168 @@ module.exports = LRUCache
// This will be a proper iterable 'Map' in engines that support it,
// or a fakey-fake PseudoMap in older versions.
var Map = require('pseudomap')
+var util = require('util')
+
+// A linked list to keep track of recently-used-ness
+var Yallist = require('yallist')
+
+// use symbols if possible, otherwise just _props
+var symbols = {}
+var hasSymbol = typeof Symbol === 'function'
+var makeSymbol
+/* istanbul ignore if */
+if (hasSymbol) {
+ makeSymbol = function (key) {
+ return Symbol.for(key)
+ }
+} else {
+ makeSymbol = function (key) {
+ return '_' + key
+ }
+}
+
+function priv (obj, key, val) {
+ var sym
+ if (symbols[key]) {
+ sym = symbols[key]
+ } else {
+ sym = makeSymbol(key)
+ symbols[key] = sym
+ }
+ if (arguments.length === 2) {
+ return obj[sym]
+ } else {
+ obj[sym] = val
+ return val
+ }
+}
function naiveLength () { return 1 }
+// lruList is a yallist where the head is the youngest
+// item, and the tail is the oldest. the list contains the Hit
+// objects as the entries.
+// Each Hit object has a reference to its Yallist.Node. This
+// never changes.
+//
+// cache is a Map (or PseudoMap) that matches the keys to
+// the Yallist.Node object.
function LRUCache (options) {
- if (!(this instanceof LRUCache))
+ if (!(this instanceof LRUCache)) {
return new LRUCache(options)
+ }
- if (typeof options === 'number')
+ if (typeof options === 'number') {
options = { max: options }
+ }
- if (!options)
+ if (!options) {
options = {}
+ }
- this._max = options.max
+ var max = priv(this, 'max', options.max)
// Kind of weird to have a default max of Infinity, but oh well.
- if (!this._max || !(typeof this._max === "number") || this._max <= 0 )
- this._max = Infinity
+ if (!max ||
+ !(typeof max === 'number') ||
+ max <= 0) {
+ priv(this, 'max', Infinity)
+ }
- this._lengthCalculator = options.length || naiveLength
- if (typeof this._lengthCalculator !== "function")
- this._lengthCalculator = naiveLength
+ var lc = options.length || naiveLength
+ if (typeof lc !== 'function') {
+ lc = naiveLength
+ }
+ priv(this, 'lengthCalculator', lc)
- this._allowStale = options.stale || false
- this._maxAge = options.maxAge || null
- this._dispose = options.dispose
+ priv(this, 'allowStale', options.stale || false)
+ priv(this, 'maxAge', options.maxAge || 0)
+ priv(this, 'dispose', options.dispose)
this.reset()
}
// resize the cache when the max changes.
-Object.defineProperty(LRUCache.prototype, "max",
- { set : function (mL) {
- if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity
- this._max = mL
- if (this._length > this._max) trim(this)
+Object.defineProperty(LRUCache.prototype, 'max', {
+ set: function (mL) {
+ if (!mL || !(typeof mL === 'number') || mL <= 0) {
+ mL = Infinity
}
- , get : function () { return this._max }
- , enumerable : true
- })
+ priv(this, 'max', mL)
+ trim(this)
+ },
+ get: function () {
+ return priv(this, 'max')
+ },
+ enumerable: true
+})
+
+Object.defineProperty(LRUCache.prototype, 'allowStale', {
+ set: function (allowStale) {
+ priv(this, 'allowStale', !!allowStale)
+ },
+ get: function () {
+ return priv(this, 'allowStale')
+ },
+ enumerable: true
+})
+
+Object.defineProperty(LRUCache.prototype, 'maxAge', {
+ set: function (mA) {
+ if (!mA || !(typeof mA === 'number') || mA < 0) {
+ mA = 0
+ }
+ priv(this, 'maxAge', mA)
+ trim(this)
+ },
+ get: function () {
+ return priv(this, 'maxAge')
+ },
+ enumerable: true
+})
// resize the cache when the lengthCalculator changes.
-Object.defineProperty(LRUCache.prototype, "lengthCalculator",
- { set : function (lC) {
- if (typeof lC !== "function") {
- this._lengthCalculator = naiveLength
- this._length = this._lruList.size
- this._cache.forEach(function (value, key) {
- value.length = 1
- })
- } else {
- this._lengthCalculator = lC
- this._length = 0
- this._cache.forEach(function (value, key) {
- value.length = this._lengthCalculator(value.value, key)
- this._length += value.length
- }, this)
- }
-
- if (this._length > this._max) trim(this)
+Object.defineProperty(LRUCache.prototype, 'lengthCalculator', {
+ set: function (lC) {
+ if (typeof lC !== 'function') {
+ lC = naiveLength
}
- , get : function () { return this._lengthCalculator }
- , enumerable : true
- })
-
-Object.defineProperty(LRUCache.prototype, "length",
- { get : function () { return this._length }
- , enumerable : true
- })
+ if (lC !== priv(this, 'lengthCalculator')) {
+ priv(this, 'lengthCalculator', lC)
+ priv(this, 'length', 0)
+ priv(this, 'lruList').forEach(function (hit) {
+ hit.length = priv(this, 'lengthCalculator').call(this, hit.value, hit.key)
+ priv(this, 'length', priv(this, 'length') + hit.length)
+ }, this)
+ }
+ trim(this)
+ },
+ get: function () { return priv(this, 'lengthCalculator') },
+ enumerable: true
+})
-Object.defineProperty(LRUCache.prototype, "itemCount",
- { get : function () { return this._lruList.size }
- , enumerable : true
- })
+Object.defineProperty(LRUCache.prototype, 'length', {
+ get: function () { return priv(this, 'length') },
+ enumerable: true
+})
-function reverseKeys (map) {
- // keys live in lruList map in insertion order.
- // we want them in reverse insertion order.
- // flip the list of keys.
- var itemCount = map.size
- var keys = new Array(itemCount)
- var i = itemCount
- map.forEach(function (value, key) {
- keys[--i] = key
- })
-
- return keys
-}
+Object.defineProperty(LRUCache.prototype, 'itemCount', {
+ get: function () { return priv(this, 'lruList').length },
+ enumerable: true
+})
LRUCache.prototype.rforEach = function (fn, thisp) {
thisp = thisp || this
- this._lruList.forEach(function (hit) {
- forEachStep(this, fn, hit, thisp)
- }, this)
+ for (var walker = priv(this, 'lruList').tail; walker !== null;) {
+ var prev = walker.prev
+ forEachStep(this, fn, walker, thisp)
+ walker = prev
+ }
}
-function forEachStep (self, fn, hit, thisp) {
+function forEachStep (self, fn, node, thisp) {
+ var hit = node.value
if (isStale(self, hit)) {
- del(self, hit)
- if (!self._allowStale) {
+ del(self, node)
+ if (!priv(self, 'allowStale')) {
hit = undefined
}
}
@@ -109,49 +173,43 @@ function forEachStep (self, fn, hit, thisp) {
}
}
-
LRUCache.prototype.forEach = function (fn, thisp) {
thisp = thisp || this
-
- var keys = reverseKeys(this._lruList)
- for (var k = 0; k < keys.length; k++) {
- var hit = this._lruList.get(keys[k])
- forEachStep(this, fn, hit, thisp)
+ for (var walker = priv(this, 'lruList').head; walker !== null;) {
+ var next = walker.next
+ forEachStep(this, fn, walker, thisp)
+ walker = next
}
}
LRUCache.prototype.keys = function () {
- return reverseKeys(this._lruList).map(function (k) {
- return this._lruList.get(k).key
+ return priv(this, 'lruList').toArray().map(function (k) {
+ return k.key
}, this)
}
LRUCache.prototype.values = function () {
- return reverseKeys(this._lruList).map(function (k) {
- return this._lruList.get(k).value
+ return priv(this, 'lruList').toArray().map(function (k) {
+ return k.value
}, this)
}
LRUCache.prototype.reset = function () {
- if (this._dispose && this._cache) {
- this._cache.forEach(function (entry, key) {
- this._dispose(key, entry.value)
+ if (priv(this, 'dispose') &&
+ priv(this, 'lruList') &&
+ priv(this, 'lruList').length) {
+ priv(this, 'lruList').forEach(function (hit) {
+ priv(this, 'dispose').call(this, hit.key, hit.value)
}, this)
}
- this._cache = new Map() // hash of items by key
- this._lruList = new Map() // list of items in order of use recency
- this._mru = 0 // most recently used
- this._lru = 0 // least recently used
- this._length = 0 // number of items in the list
+ priv(this, 'cache', new Map()) // hash of items by key
+ priv(this, 'lruList', new Yallist()) // list of items in order of use recency
+ priv(this, 'length', 0) // length of items in the list
}
LRUCache.prototype.dump = function () {
- var arr = []
- var i = 0
- var size = this._lruList.size
- return reverseKeys(this._lruList).map(function (k) {
- var hit = this._lruList.get(k)
+ return priv(this, 'lruList').map(function (hit) {
if (!isStale(this, hit)) {
return {
k: hit.key,
@@ -159,68 +217,137 @@ LRUCache.prototype.dump = function () {
e: hit.now + (hit.maxAge || 0)
}
}
- }, this).filter(function (h) {
+ }, this).toArray().filter(function (h) {
return h
})
}
LRUCache.prototype.dumpLru = function () {
- return this._lruList
+ return priv(this, 'lruList')
+}
+
+LRUCache.prototype.inspect = function (n, opts) {
+ var str = 'LRUCache {'
+ var extras = false
+
+ var as = priv(this, 'allowStale')
+ if (as) {
+ str += '\n allowStale: true'
+ extras = true
+ }
+
+ var max = priv(this, 'max')
+ if (max && max !== Infinity) {
+ if (extras) {
+ str += ','
+ }
+ str += '\n max: ' + util.inspect(max, opts)
+ extras = true
+ }
+
+ var maxAge = priv(this, 'maxAge')
+ if (maxAge) {
+ if (extras) {
+ str += ','
+ }
+ str += '\n maxAge: ' + util.inspect(maxAge, opts)
+ extras = true
+ }
+
+ var lc = priv(this, 'lengthCalculator')
+ if (lc && lc !== naiveLength) {
+ if (extras) {
+ str += ','
+ }
+ str += '\n length: ' + util.inspect(priv(this, 'length'), opts)
+ extras = true
+ }
+
+ var didFirst = false
+ priv(this, 'lruList').forEach(function (item) {
+ if (didFirst) {
+ str += ',\n '
+ } else {
+ if (extras) {
+ str += ',\n'
+ }
+ didFirst = true
+ str += '\n '
+ }
+ var key = util.inspect(item.key).split('\n').join('\n ')
+ var val = { value: item.value }
+ if (item.maxAge !== maxAge) {
+ val.maxAge = item.maxAge
+ }
+ if (lc !== naiveLength) {
+ val.length = item.length
+ }
+ if (isStale(this, item)) {
+ val.stale = true
+ }
+
+ val = util.inspect(val, opts).split('\n').join('\n ')
+ str += key + ' => ' + val
+ })
+
+ if (didFirst || extras) {
+ str += '\n'
+ }
+ str += '}'
+
+ return str
}
LRUCache.prototype.set = function (key, value, maxAge) {
- maxAge = maxAge || this._maxAge
+ maxAge = maxAge || priv(this, 'maxAge')
var now = maxAge ? Date.now() : 0
- var len = this._lengthCalculator(value, key)
+ var len = priv(this, 'lengthCalculator').call(this, value, key)
- if (this._cache.has(key)) {
- if (len > this._max) {
- del(this, this._cache.get(key))
+ if (priv(this, 'cache').has(key)) {
+ if (len > priv(this, 'max')) {
+ del(this, priv(this, 'cache').get(key))
return false
}
- var item = this._cache.get(key)
+ var node = priv(this, 'cache').get(key)
+ var item = node.value
// dispose of the old one before overwriting
- if (this._dispose)
- this._dispose(key, item.value)
+ if (priv(this, 'dispose')) {
+ priv(this, 'dispose').call(this, key, item.value)
+ }
item.now = now
item.maxAge = maxAge
item.value = value
- this._length += (len - item.length)
+ priv(this, 'length', priv(this, 'length') + (len - item.length))
item.length = len
this.get(key)
-
- if (this._length > this._max)
- trim(this)
-
+ trim(this)
return true
}
- var hit = new Entry(key, value, this._mru, len, now, maxAge)
- incMru(this)
+ var hit = new Entry(key, value, len, now, maxAge)
// oversized objects fall out of cache automatically.
- if (hit.length > this._max) {
- if (this._dispose) this._dispose(key, value)
+ if (hit.length > priv(this, 'max')) {
+ if (priv(this, 'dispose')) {
+ priv(this, 'dispose').call(this, key, value)
+ }
return false
}
- this._length += hit.length
- this._cache.set(key, hit)
- this._lruList.set(hit.lu, hit)
-
- if (this._length > this._max)
- trim(this)
-
+ priv(this, 'length', priv(this, 'length') + hit.length)
+ priv(this, 'lruList').unshift(hit)
+ priv(this, 'cache').set(key, priv(this, 'lruList').head)
+ trim(this)
return true
}
LRUCache.prototype.has = function (key) {
- if (!this._cache.has(key)) return false
- var hit = this._cache.get(key)
+ if (!priv(this, 'cache').has(key)) return false
+ var hit = priv(this, 'cache').get(key).value
if (isStale(this, hit)) {
return false
}
@@ -236,18 +363,19 @@ LRUCache.prototype.peek = function (key) {
}
LRUCache.prototype.pop = function () {
- var hit = this._lruList.get(this._lru)
- del(this, hit)
- return hit || null
+ var node = priv(this, 'lruList').tail
+ if (!node) return null
+ del(this, node)
+ return node.value
}
LRUCache.prototype.del = function (key) {
- del(this, this._cache.get(key))
+ del(this, priv(this, 'cache').get(key))
}
LRUCache.prototype.load = function (arr) {
- //reset the cache
- this.reset();
+ // reset the cache
+ this.reset()
var now = Date.now()
// A previous serialized cache has the most recent items first
@@ -267,90 +395,75 @@ LRUCache.prototype.load = function (arr) {
}
}
+LRUCache.prototype.prune = function () {
+ var self = this
+ priv(this, 'cache').forEach(function (value, key) {
+ get(self, key, false)
+ })
+}
+
function get (self, key, doUse) {
- var hit = self._cache.get(key)
- if (hit) {
+ var node = priv(self, 'cache').get(key)
+ if (node) {
+ var hit = node.value
if (isStale(self, hit)) {
- del(self, hit)
- if (!self._allowStale) hit = undefined
+ del(self, node)
+ if (!priv(self, 'allowStale')) hit = undefined
} else {
- if (doUse) use(self, hit)
+ if (doUse) {
+ priv(self, 'lruList').unshiftNode(node)
+ }
}
if (hit) hit = hit.value
}
return hit
}
-function isStale(self, hit) {
- if (!hit || (!hit.maxAge && !self._maxAge)) return false
- var stale = false;
+function isStale (self, hit) {
+ if (!hit || (!hit.maxAge && !priv(self, 'maxAge'))) {
+ return false
+ }
+ var stale = false
var diff = Date.now() - hit.now
if (hit.maxAge) {
stale = diff > hit.maxAge
} else {
- stale = self._maxAge && (diff > self._maxAge)
+ stale = priv(self, 'maxAge') && (diff > priv(self, 'maxAge'))
}
- return stale;
-}
-
-function use (self, hit) {
- shiftLU(self, hit)
- hit.lu = self._mru
- incMru(self)
- self._lruList.set(hit.lu, hit)
+ return stale
}
function trim (self) {
- if (self._length > self._max) {
- var keys = reverseKeys(self._lruList)
- for (var k = keys.length - 1; self._length > self._max; k--) {
+ if (priv(self, 'length') > priv(self, 'max')) {
+ for (var walker = priv(self, 'lruList').tail;
+ priv(self, 'length') > priv(self, 'max') && walker !== null;) {
// We know that we're about to delete this one, and also
// what the next least recently used key will be, so just
// go ahead and set it now.
- self._lru = keys[k - 1]
- del(self, self._lruList.get(keys[k]))
+ var prev = walker.prev
+ del(self, walker)
+ walker = prev
}
}
}
-function shiftLU (self, hit) {
- self._lruList.delete(hit.lu)
- if (hit.lu === self._lru)
- self._lru = reverseKeys(self._lruList).pop()
-}
-
-function del (self, hit) {
- if (hit) {
- if (self._dispose) self._dispose(hit.key, hit.value)
- self._length -= hit.length
- self._cache.delete(hit.key)
- shiftLU(self, hit)
+function del (self, node) {
+ if (node) {
+ var hit = node.value
+ if (priv(self, 'dispose')) {
+ priv(self, 'dispose').call(this, hit.key, hit.value)
+ }
+ priv(self, 'length', priv(self, 'length') - hit.length)
+ priv(self, 'cache').delete(hit.key)
+ priv(self, 'lruList').removeNode(node)
}
}
// classy, since V8 prefers predictable objects.
-function Entry (key, value, lu, length, now, maxAge) {
+function Entry (key, value, length, now, maxAge) {
this.key = key
this.value = value
- this.lu = lu
this.length = length
this.now = now
- if (maxAge) this.maxAge = maxAge
-}
-
-
-// Incrementers and decrementers that loop at MAX_SAFE_INTEGER
-// only relevant for the lu, lru, and mru counters, since they
-// get touched a lot and can get very large. Also, since they
-// only go upwards, and the sets will tend to be much smaller than
-// the max, we can very well assume that a very small number comes
-// after a very large number, rather than before it.
-var maxSafeInt = Number.MAX_SAFE_INTEGER || 9007199254740991
-function intInc (number) {
- return (number === maxSafeInt) ? 0 : number + 1
-}
-function incMru (self) {
- do {
- self._mru = intInc(self._mru)
- } while (self._lruList.has(self._mru))
+ this.maxAge = maxAge || 0
}
diff --git a/deps/npm/node_modules/lru-cache/node_modules/pseudomap/map.js b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/map.js
index c0ad39c029718f..7db15994612fd9 100644
--- a/deps/npm/node_modules/lru-cache/node_modules/pseudomap/map.js
+++ b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/map.js
@@ -4,119 +4,6 @@ if (process.env.npm_package_name === 'pseudomap' &&
if (typeof Map === 'function' && !process.env.TEST_PSEUDOMAP) {
module.exports = Map
- return
-}
-
-var hasOwnProperty = Object.prototype.hasOwnProperty
-
-module.exports = PseudoMap
-
-function PseudoMap (set) {
- if (!(this instanceof PseudoMap)) // whyyyyyyy
- throw new TypeError("Constructor PseudoMap requires 'new'")
-
- this.clear()
-
- if (set) {
- if ((set instanceof PseudoMap) ||
- (typeof Map === 'function' && set instanceof Map))
- set.forEach(function (value, key) {
- this.set(key, value)
- }, this)
- else if (Array.isArray(set))
- set.forEach(function (kv) {
- this.set(kv[0], kv[1])
- }, this)
- else
- throw new TypeError('invalid argument')
- }
-}
-
-PseudoMap.prototype.forEach = function (fn, thisp) {
- thisp = thisp || this
- Object.keys(this._data).forEach(function (k) {
- if (k !== 'size')
- fn.call(thisp, this._data[k].value, this._data[k].key)
- }, this)
-}
-
-PseudoMap.prototype.has = function (k) {
- return !!find(this._data, k)
-}
-
-PseudoMap.prototype.get = function (k) {
- var res = find(this._data, k)
- return res && res.value
-}
-
-PseudoMap.prototype.set = function (k, v) {
- set(this._data, k, v)
-}
-
-PseudoMap.prototype.delete = function (k) {
- var res = find(this._data, k)
- if (res) {
- delete this._data[res._index]
- this._data.size--
- }
-}
-
-PseudoMap.prototype.clear = function () {
- var data = Object.create(null)
- data.size = 0
-
- Object.defineProperty(this, '_data', {
- value: data,
- enumerable: false,
- configurable: true,
- writable: false
- })
-}
-
-Object.defineProperty(PseudoMap.prototype, 'size', {
- get: function () {
- return this._data.size
- },
- set: function (n) {},
- enumerable: true,
- configurable: true
-})
-
-PseudoMap.prototype.values =
-PseudoMap.prototype.keys =
-PseudoMap.prototype.entries = function () {
- throw new Error('iterators are not implemented in this version')
-}
-
-// Either identical, or both NaN
-function same (a, b) {
- return a === b || a !== a && b !== b
-}
-
-function Entry (k, v, i) {
- this.key = k
- this.value = v
- this._index = i
-}
-
-function find (data, k) {
- for (var i = 0, s = '_' + k, key = s;
- hasOwnProperty.call(data, key);
- key = s + i++) {
- if (same(data[key].key, k))
- return data[key]
- }
-}
-
-function set (data, k, v) {
- for (var i = 0, s = '_' + k, key = s;
- hasOwnProperty.call(data, key);
- key = s + i++) {
- if (same(data[key].key, k)) {
- data[key].value = v
- return
- }
- }
- data.size++
- data[key] = new Entry(k, v, key)
+} else {
+ module.exports = require('./pseudomap')
}
diff --git a/deps/npm/node_modules/lru-cache/node_modules/pseudomap/package.json b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/package.json
index a0161aca345563..b38d1897c19d5d 100644
--- a/deps/npm/node_modules/lru-cache/node_modules/pseudomap/package.json
+++ b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/package.json
@@ -1,6 +1,6 @@
{
"name": "pseudomap",
- "version": "1.0.1",
+ "version": "1.0.2",
"description": "A thing that is a lot like ES6 `Map`, but without iterators, for use in environments where `for..of` syntax and `Map` are not available.",
"main": "map.js",
"directories": {
@@ -26,10 +26,26 @@
"url": "https://github.com/isaacs/pseudomap/issues"
},
"homepage": "https://github.com/isaacs/pseudomap#readme",
- "readme": "# pseudomap\n\nA thing that is a lot like ES6 `Map`, but without iterators, for use\nin environments where `for..of` syntax and `Map` are not available.\n\nIf you need iterators, or just in general a more faithful polyfill to\nES6 Maps, check out [es6-map](http://npm.im/es6-map).\n\nIf you are in an environment where `Map` is supported, then that will\nbe returned instead, unless `process.env.TEST_PSEUDOMAP` is set.\n\nYou can use any value as keys, and any value as data. Setting again\nwith the identical key will overwrite the previous value.\n\nInternally, data is stored on an `Object.create(null)` style object.\nThe key is coerced to a string to generate the key on the internal\ndata-bag object. The original key used is stored along with the data.\n\nIn the event of a stringified-key collision, a new key is generated by\nappending an increasing number to the stringified-key until finding\neither the intended key or an empty spot.\n\nNote that because object traversal order of plain objects is not\nguaranteed to be identical to insertion order, the insertion order\nguarantee of `Map.prototype.forEach` is not guaranteed in this\nimplementation. However, in all versions of Node.js and V8 where this\nmodule works, `forEach` does traverse data in insertion order.\n\n## API\n\nMost of the [Map\nAPI](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map),\nwith the following exceptions:\n\n1. A `Map` object is not an iterator.\n2. `values`, `keys`, and `entries` methods are not implemented,\n because they return iterators.\n3. The argument to the constructor can be an Array of `[key, value]`\n pairs, or a `Map` or `PseudoMap` object. But, since iterators\n aren't used, passing any plain-old iterator won't initialize the\n map properly.\n\n## USAGE\n\nUse just like a regular ES6 Map.\n\n```javascript\nvar PseudoMap = require('pseudomap')\n\n// optionally provide a pseudomap, or an array of [key,value] pairs\n// as the argument to initialize the map with\nvar myMap = new PseudoMap()\n\nmyMap.set(1, 'number 1')\nmyMap.set('1', 'string 1')\nvar akey = {}\nvar bkey = {}\nmyMap.set(akey, { some: 'data' })\nmyMap.set(bkey, { some: 'other data' })\n```\n",
- "readmeFilename": "README.md",
- "_id": "pseudomap@1.0.1",
- "_shasum": "29b4e7f37bbbf3e3c9b9152981c40f33d56b2b28",
- "_resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.1.tgz",
- "_from": "pseudomap@>=1.0.1 <2.0.0"
+ "gitHead": "b6dc728207a0321ede6479e34506d3e0e13a940b",
+ "_id": "pseudomap@1.0.2",
+ "_shasum": "f052a28da70e618917ef0a8ac34c1ae5a68286b3",
+ "_from": "pseudomap@>=1.0.1 <2.0.0",
+ "_npmVersion": "3.3.2",
+ "_nodeVersion": "4.0.0",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "dist": {
+ "shasum": "f052a28da70e618917ef0a8ac34c1ae5a68286b3",
+ "tarball": "http://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/lru-cache/node_modules/pseudomap/pseudomap.js b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/pseudomap.js
new file mode 100644
index 00000000000000..25a21d829e8b3f
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/pseudomap/pseudomap.js
@@ -0,0 +1,113 @@
+var hasOwnProperty = Object.prototype.hasOwnProperty
+
+module.exports = PseudoMap
+
+function PseudoMap (set) {
+ if (!(this instanceof PseudoMap)) // whyyyyyyy
+ throw new TypeError("Constructor PseudoMap requires 'new'")
+
+ this.clear()
+
+ if (set) {
+ if ((set instanceof PseudoMap) ||
+ (typeof Map === 'function' && set instanceof Map))
+ set.forEach(function (value, key) {
+ this.set(key, value)
+ }, this)
+ else if (Array.isArray(set))
+ set.forEach(function (kv) {
+ this.set(kv[0], kv[1])
+ }, this)
+ else
+ throw new TypeError('invalid argument')
+ }
+}
+
+PseudoMap.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ Object.keys(this._data).forEach(function (k) {
+ if (k !== 'size')
+ fn.call(thisp, this._data[k].value, this._data[k].key)
+ }, this)
+}
+
+PseudoMap.prototype.has = function (k) {
+ return !!find(this._data, k)
+}
+
+PseudoMap.prototype.get = function (k) {
+ var res = find(this._data, k)
+ return res && res.value
+}
+
+PseudoMap.prototype.set = function (k, v) {
+ set(this._data, k, v)
+}
+
+PseudoMap.prototype.delete = function (k) {
+ var res = find(this._data, k)
+ if (res) {
+ delete this._data[res._index]
+ this._data.size--
+ }
+}
+
+PseudoMap.prototype.clear = function () {
+ var data = Object.create(null)
+ data.size = 0
+
+ Object.defineProperty(this, '_data', {
+ value: data,
+ enumerable: false,
+ configurable: true,
+ writable: false
+ })
+}
+
+Object.defineProperty(PseudoMap.prototype, 'size', {
+ get: function () {
+ return this._data.size
+ },
+ set: function (n) {},
+ enumerable: true,
+ configurable: true
+})
+
+PseudoMap.prototype.values =
+PseudoMap.prototype.keys =
+PseudoMap.prototype.entries = function () {
+ throw new Error('iterators are not implemented in this version')
+}
+
+// Either identical, or both NaN
+function same (a, b) {
+ return a === b || a !== a && b !== b
+}
+
+function Entry (k, v, i) {
+ this.key = k
+ this.value = v
+ this._index = i
+}
+
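+// Keys are stored on the data bag under '_' + key (stringified). If two
+// distinct keys collide on that name, an increasing numeric suffix is
+// appended until the matching entry (or a free slot) is found.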
+function find (data, k) {
+ for (var i = 0, s = '_' + k, key = s;
+ hasOwnProperty.call(data, key);
+ key = s + i++) {
+ if (same(data[key].key, k))
+ return data[key]
+ }
+}
+
+function set (data, k, v) {
+ for (var i = 0, s = '_' + k, key = s;
+ hasOwnProperty.call(data, key);
+ key = s + i++) {
+ if (same(data[key].key, k)) {
+ data[key].value = v
+ return
+ }
+ }
+ data.size++
+ data[key] = new Entry(k, v, key)
+}
diff --git a/deps/npm/node_modules/which/.npmignore b/deps/npm/node_modules/lru-cache/node_modules/yallist/.npmignore
similarity index 75%
rename from deps/npm/node_modules/which/.npmignore
rename to deps/npm/node_modules/lru-cache/node_modules/yallist/.npmignore
index 0ac606ffcbed0a..534108e3f481cf 100644
--- a/deps/npm/node_modules/which/.npmignore
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/.npmignore
@@ -1,3 +1,4 @@
-.nyc_output/
-coverage/
node_modules/
+coverage/
+.nyc_output/
+nyc_output/
diff --git a/deps/npm/node_modules/which/.travis.yml b/deps/npm/node_modules/lru-cache/node_modules/yallist/.travis.yml
similarity index 89%
rename from deps/npm/node_modules/which/.travis.yml
rename to deps/npm/node_modules/lru-cache/node_modules/yallist/.travis.yml
index 7f22ad5a13250d..9c1a7b6ccd2c9e 100644
--- a/deps/npm/node_modules/which/.travis.yml
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/.travis.yml
@@ -4,3 +4,4 @@ node_js:
- '0.10'
- '0.12'
- '4'
+ - '5'
diff --git a/deps/npm/node_modules/lru-cache/node_modules/yallist/CONTRIBUTING.md b/deps/npm/node_modules/lru-cache/node_modules/yallist/CONTRIBUTING.md
new file mode 100644
index 00000000000000..5f51e6f1e53405
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/CONTRIBUTING.md
@@ -0,0 +1,4 @@
+1. The whole API is public. No underscore-prefixed pretend-private
+ things or hidden Object.create magic mumbo jumbo here. Plain old
+ objects that are created from constructors.
+2. 100% test coverage must be maintained.
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/lru-cache/node_modules/yallist/LICENSE
similarity index 100%
rename from deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/LICENSE
rename to deps/npm/node_modules/lru-cache/node_modules/yallist/LICENSE
diff --git a/deps/npm/node_modules/lru-cache/node_modules/yallist/README.md b/deps/npm/node_modules/lru-cache/node_modules/yallist/README.md
new file mode 100644
index 00000000000000..f5861018696688
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/README.md
@@ -0,0 +1,204 @@
+# yallist
+
+Yet Another Linked List
+
+There are many doubly-linked list implementations like it, but this
+one is mine.
+
+For when an array would be too big, and a Map can't be iterated in
+reverse order.
+
+
+[Build Status](https://travis-ci.org/isaacs/yallist) [Coverage Status](https://coveralls.io/github/isaacs/yallist)
+
+## basic usage
+
+```javascript
+var yallist = require('yallist')
+var myList = yallist.create([1, 2, 3])
+myList.push('foo')
+myList.unshift('bar')
+// of course pop() and shift() are there, too
+console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
+myList.forEach(function (k) {
+ // walk the list head to tail
+})
+myList.forEachReverse(function (k, index, list) {
+ // walk the list tail to head
+})
+var myDoubledList = myList.map(function (k) {
+ return k + k
+})
+// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
+// mapReverse is also a thing
+var myDoubledListReverse = myList.mapReverse(function (k) {
+ return k + k
+}) // ['foofoo', 6, 4, 2, 'barbar']
+
+var reduced = myList.reduce(function (set, entry) {
+ set += entry
+ return set
+}, 'start')
+console.log(reduced) // 'startfoo123bar'
+```
+
+## api
+
+The whole API is considered "public".
+
+Functions with the same name as an Array method work more or less the
+same way.
+
+There are reverse versions of most things because that's the point.
+
+### Yallist
+
+Default export, the class that holds and manages a list.
+
+Call it with either a forEach-able (like an array) or a set of
+arguments, to initialize the list.
+
+The Array-ish methods all act like you'd expect. No magic length,
+though, so if you change that it won't automatically prune or add
+empty spots.
+
+### Yallist.create(..)
+
+Alias for Yallist function. Some people like factories.
+
+#### yallist.head
+
+The first node in the list
+
+#### yallist.tail
+
+The last node in the list
+
+#### yallist.length
+
+The number of nodes in the list. (Change this at your peril. It is
+not magic like Array length.)
+
+#### yallist.toArray()
+
+Convert the list to an array.
+
+#### yallist.forEach(fn, [thisp])
+
+Call a function on each item in the list.
+
+#### yallist.forEachReverse(fn, [thisp])
+
+Call a function on each item in the list, in reverse order.
+
+#### yallist.get(n)
+
+Get the data at position `n` in the list. If you use this a lot,
+probably better off just using an Array.
+
+#### yallist.getReverse(n)
+
+Get the data at position `n`, counting from the tail.
+
+#### yallist.map(fn, thisp)
+
+Create a new Yallist with the result of calling the function on each
+item.
+
+#### yallist.mapReverse(fn, thisp)
+
+Same as `map`, but in reverse.
+
+#### yallist.pop()
+
+Get the data from the list tail, and remove the tail from the list.
+
+#### yallist.push(item, ...)
+
+Insert one or more items to the tail of the list.
+
+#### yallist.reduce(fn, initialValue)
+
+Like Array.reduce.
+
+#### yallist.reduceReverse
+
+Like Array.reduce, but in reverse.
+
+#### yallist.reverse
+
+Reverse the list in place.
+
+#### yallist.shift()
+
+Get the data from the list head, and remove the head from the list.
+
+#### yallist.slice([from], [to])
+
+Just like Array.slice, but returns a new Yallist.
+
+#### yallist.sliceReverse([from], [to])
+
+Just like yallist.slice, but the result is returned in reverse.
+
+#### yallist.toArray()
+
+Create an array representation of the list.
+
+#### yallist.toArrayReverse()
+
+Create a reversed array representation of the list.
+
+#### yallist.unshift(item, ...)
+
+Insert one or more items to the head of the list.
+
+#### yallist.unshiftNode(node)
+
+Move a Node object to the front of the list. (That is, pull it out of
+wherever it lives, and make it the new head.)
+
+If the node belongs to a different list, then that list will remove it
+first.
+
+#### yallist.pushNode(node)
+
+Move a Node object to the end of the list. (That is, pull it out of
+wherever it lives, and make it the new tail.)
+
+If the node belongs to a list already, then that list will remove it
+first.
+
+#### yallist.removeNode(node)
+
+Remove a node from the list, preserving referential integrity of head
+and tail and other nodes.
+
+Will throw an error if you try to have a list remove a node that
+doesn't belong to it.
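+
+A short sketch of the node-moving methods above:
+
+```javascript
+var Yallist = require('yallist')
+
+var a = Yallist(1, 2, 3)
+var b = Yallist('x', 'y')
+
+// Moving a node removes it from its old list first, so both lists
+// stay consistent.
+b.unshiftNode(a.head)
+a.toArray() // => [2, 3]
+b.toArray() // => [1, 'x', 'y']
+
+// removeNode throws if the node belongs to some other list.
+// a.removeNode(b.head) // Error
+```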
+
+### Yallist.Node
+
+The class that holds the data and is actually the list.
+
+Call with `var n = new Node(value, previousNode, nextNode)`
+
+Note that if you do direct operations on Nodes themselves, it's very
+easy to get into weird states where the list is broken. Be careful :)
+
+#### node.next
+
+The next node in the list.
+
+#### node.prev
+
+The previous node in the list.
+
+#### node.value
+
+The data the node contains.
+
+#### node.list
+
+The list to which this node belongs. (Null if it does not belong to
+any list.)
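+
+For example, a detached node can be created and then attached:
+
+```javascript
+var Yallist = require('yallist')
+
+var list = Yallist(1, 2, 3)
+var n = new Yallist.Node('zero')
+
+// n is not attached to any list yet
+list.unshiftNode(n)
+list.toArray()  // => ['zero', 1, 2, 3]
+n.list === list // => true
+```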
diff --git a/deps/npm/node_modules/lru-cache/node_modules/yallist/package.json b/deps/npm/node_modules/lru-cache/node_modules/yallist/package.json
new file mode 100644
index 00000000000000..18ba21472cde00
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/package.json
@@ -0,0 +1,52 @@
+{
+ "name": "yallist",
+ "version": "2.0.0",
+ "description": "Yet Another Linked List",
+ "main": "yallist.js",
+ "directories": {
+ "test": "test"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "^2.3.2"
+ },
+ "scripts": {
+ "test": "tap test/*.js --cov"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "license": "ISC",
+ "gitHead": "702eaba87deefa9f8fc2f8e36cb225bc2141fdc3",
+ "bugs": {
+ "url": "https://github.com/isaacs/yallist/issues"
+ },
+ "homepage": "https://github.com/isaacs/yallist#readme",
+ "_id": "yallist@2.0.0",
+ "_shasum": "306c543835f09ee1a4cb23b7bce9ab341c91cdd4",
+ "_from": "yallist@>=2.0.0 <3.0.0",
+ "_npmVersion": "3.3.2",
+ "_nodeVersion": "4.0.0",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "dist": {
+ "shasum": "306c543835f09ee1a4cb23b7bce9ab341c91cdd4",
+ "tarball": "http://registry.npmjs.org/yallist/-/yallist-2.0.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "_resolved": "https://registry.npmjs.org/yallist/-/yallist-2.0.0.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/lru-cache/node_modules/yallist/test/basic.js b/deps/npm/node_modules/lru-cache/node_modules/yallist/test/basic.js
new file mode 100644
index 00000000000000..3c8d1f48d904a2
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/test/basic.js
@@ -0,0 +1,188 @@
+var t = require('tap')
+var Yallist = require('../yallist.js')
+
+var y = new Yallist(1,2,3,4,5)
+var z = new Yallist([1,2,3,4,5])
+t.similar(y, z, 'build from single list or args')
+
+function add10 (i) {
+ return i + 10
+}
+t.similar(y.map(add10).toArray(), [11, 12, 13, 14, 15])
+t.similar(y.mapReverse(add10).toArray(), [15, 14, 13, 12, 11])
+
+t.similar(y.map(add10).toArrayReverse(), [15, 14, 13, 12, 11])
+t.isa(Yallist(1,2,3), 'Yallist')
+t.equal(y.push(6, 7, 8), 8)
+t.similar(y.toArray(), [1, 2, 3, 4, 5, 6, 7, 8])
+y.pop()
+y.shift()
+y.unshift(100)
+
+var expect = [100, 2, 3, 4, 5, 6, 7]
+var expectReverse = [ 7, 6, 5, 4, 3, 2, 100 ]
+
+t.similar(y.toArray(), expect)
+t.equal(y.length, y.toArray().length)
+
+t.test(function forEach (t) {
+ t.plan(y.length * 2)
+ y.forEach(function (item, i, list) {
+ t.equal(item, expect[i])
+ t.equal(list, y)
+ })
+})
+
+t.test(function forEach (t) {
+ t.plan(y.length * 5)
+ var n = 0
+ y.forEachReverse(function (item, i, list) {
+ t.equal(item, expectReverse[n])
+ t.equal(item, expect[i])
+ t.equal(item, y.get(i))
+ t.equal(item, y.getReverse(n))
+ n += 1
+ t.equal(list, y)
+ })
+})
+
+t.equal(y.getReverse(100), undefined)
+
+t.equal(y.get(9999), undefined)
+
+
+function sum (a, b) { return a + b }
+t.equal(y.reduce(sum), 127)
+t.equal(y.reduce(sum, 100), 227)
+t.equal(y.reduceReverse(sum), 127)
+t.equal(y.reduceReverse(sum, 100), 227)
+
+t.equal(Yallist().pop(), undefined)
+t.equal(Yallist().shift(), undefined)
+
+var x = Yallist()
+x.unshift(1)
+t.equal(x.length, 1)
+t.similar(x.toArray(), [1])
+
+// verify that y.toArray() returns an array and if we create a
+// new Yallist from that array, we get a list matching
+t.similar(Yallist(y.toArray()), y)
+t.similar(Yallist.apply(null, y.toArray()), y)
+
+t.throws(function () {
+ new Yallist().reduce(function () {})
+}, {}, new TypeError('Reduce of empty list with no initial value'))
+t.throws(function () {
+ new Yallist().reduceReverse(function () {})
+}, {}, new TypeError('Reduce of empty list with no initial value'))
+
+var z = y.reverse()
+t.equal(z, y)
+t.similar(y.toArray(), expectReverse)
+y.reverse()
+t.similar(y.toArray(), expect)
+
+var a = Yallist(1,2,3,4,5,6)
+var cases = [
+ [ [2, 4], [3, 4] ],
+ [ [2, -4], [] ],
+ [ [2, -2], [3, 4] ],
+ [ [1, -2], [2, 3, 4] ],
+ [ [-1, -2], [] ],
+ [ [-5, -2], [2, 3, 4] ],
+ [ [-99, 2], [1, 2] ],
+ [ [5, 99], [6] ],
+ [ [], [1,2,3,4,5,6] ]
+]
+t.test('slice', function (t) {
+ t.plan(cases.length)
+ cases.forEach(function (c) {
+ t.test(JSON.stringify(c), function (t) {
+ t.similar(a.slice.apply(a, c[0]), Yallist(c[1]))
+ t.similar([].slice.apply(a.toArray(), c[0]), c[1])
+ t.end()
+ })
+ })
+})
+
+t.test('sliceReverse', function (t) {
+ t.plan(cases.length)
+ cases.forEach(function (c) {
+ var rev = c[1].slice().reverse()
+ t.test(JSON.stringify([c[0], rev]), function (t) {
+ t.similar(a.sliceReverse.apply(a, c[0]), Yallist(rev))
+ t.similar([].slice.apply(a.toArray(), c[0]).reverse(), rev)
+ t.end()
+ })
+ })
+})
+
+var inserter = Yallist(1,2,3,4,5)
+inserter.unshiftNode(inserter.head.next)
+t.similar(inserter.toArray(), [2,1,3,4,5])
+inserter.unshiftNode(inserter.tail)
+t.similar(inserter.toArray(), [5,2,1,3,4])
+inserter.unshiftNode(inserter.head)
+t.similar(inserter.toArray(), [5,2,1,3,4])
+
+var single = Yallist(1)
+single.unshiftNode(single.head)
+t.similar(single.toArray(), [1])
+
+inserter = Yallist(1,2,3,4,5)
+inserter.pushNode(inserter.tail.prev)
+t.similar(inserter.toArray(), [1,2,3,5,4])
+inserter.pushNode(inserter.head)
+t.similar(inserter.toArray(), [2,3,5,4,1])
+inserter.unshiftNode(inserter.head)
+t.similar(inserter.toArray(), [2,3,5,4,1])
+
+single = Yallist(1)
+single.pushNode(single.tail)
+t.similar(single.toArray(), [1])
+
+var swiped = Yallist(9,8,7)
+inserter.unshiftNode(swiped.head.next)
+t.similar(inserter.toArray(), [8,2,3,5,4,1])
+t.similar(swiped.toArray(), [9,7])
+
+swiped = Yallist(9,8,7)
+inserter.pushNode(swiped.head.next)
+t.similar(inserter.toArray(), [8,2,3,5,4,1,8])
+t.similar(swiped.toArray(), [9,7])
+
+swiped.unshiftNode(Yallist.Node(99))
+t.similar(swiped.toArray(), [99,9,7])
+swiped.pushNode(Yallist.Node(66))
+t.similar(swiped.toArray(), [99,9,7,66])
+
+var e = Yallist()
+e.unshiftNode(Yallist.Node(1))
+t.same(e.toArray(), [1])
+e = Yallist()
+e.pushNode(Yallist.Node(1))
+t.same(e.toArray(), [1])
+
+// steal them back, don't break the lists
+swiped.unshiftNode(inserter.head)
+t.same(swiped, Yallist(8,99,9,7,66))
+t.same(inserter, Yallist(2,3,5,4,1,8))
+swiped.unshiftNode(inserter.tail)
+t.same(inserter, Yallist(2,3,5,4,1))
+t.same(swiped, Yallist(8,8,99,9,7,66))
+
+
+t.throws(function remove_foreign_node () {
+ e.removeNode(swiped.head)
+}, {}, new Error('removing node which does not belong to this list'))
+t.throws(function remove_unlisted_node () {
+ e.removeNode(Yallist.Node('nope'))
+}, {}, new Error('removing node which does not belong to this list'))
+
+e = Yallist(1,2)
+e.removeNode(e.head)
+t.same(e, Yallist(2))
+e = Yallist(1,2)
+e.removeNode(e.tail)
+t.same(e, Yallist(1))
diff --git a/deps/npm/node_modules/lru-cache/node_modules/yallist/yallist.js b/deps/npm/node_modules/lru-cache/node_modules/yallist/yallist.js
new file mode 100644
index 00000000000000..fb60fc2878b438
--- /dev/null
+++ b/deps/npm/node_modules/lru-cache/node_modules/yallist/yallist.js
@@ -0,0 +1,360 @@
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length --
+ node.next = null
+ node.prev = null
+ node.list = null
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length ++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length ++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail)
+ return undefined
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ this.tail.next = null
+ this.length --
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head)
+ return undefined
+
+ var res = this.head.value
+ this.head = this.head.next
+ this.head.prev = null
+ this.length --
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null; ) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.reverse = function () {
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length ++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length ++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
diff --git a/deps/npm/node_modules/lru-cache/package.json b/deps/npm/node_modules/lru-cache/package.json
index 04e14d44bbdbbc..47aedc7bdda2b2 100644
--- a/deps/npm/node_modules/lru-cache/package.json
+++ b/deps/npm/node_modules/lru-cache/package.json
@@ -1,7 +1,7 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
- "version": "3.2.0",
+ "version": "4.0.1",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
@@ -12,7 +12,8 @@
"cache"
],
"scripts": {
- "test": "tap test --gc"
+ "test": "tap test --branches=100 --functions=100 --lines=100 --statements=100",
+ "posttest": "standard test/*.js lib/*.js"
},
"main": "lib/lru-cache.js",
"repository": {
@@ -20,21 +21,50 @@
"url": "git://github.com/isaacs/node-lru-cache.git"
},
"devDependencies": {
- "tap": "^1.2.0",
- "weak": ""
+ "standard": "^5.4.1",
+ "tap": "^5.1.1"
},
"license": "ISC",
"dependencies": {
- "pseudomap": "^1.0.1"
+ "pseudomap": "^1.0.1",
+ "yallist": "^2.0.0"
},
- "readme": "# lru cache\n\nA cache object that deletes the least-recently-used items.\n\n## Usage:\n\n```javascript\nvar LRU = require(\"lru-cache\")\n , options = { max: 500\n , length: function (n, key) { return n * 2 + key.length }\n , dispose: function (key, n) { n.close() }\n , maxAge: 1000 * 60 * 60 }\n , cache = LRU(options)\n , otherCache = LRU(50) // sets just the max size\n\ncache.set(\"key\", \"value\")\ncache.get(\"key\") // \"value\"\n\n// non-string keys ARE fully supported\nvar someObject = {}\ncache.set(someObject, 'a value')\ncache.set('[object Object]', 'a different value')\nassert.equal(cache.get(someObject), 'a value')\n\ncache.reset() // empty the cache\n```\n\nIf you put more stuff in it, then items will fall out.\n\nIf you try to put an oversized thing in it, then it'll fall out right\naway.\n\n## Options\n\n* `max` The maximum size of the cache, checked by applying the length\n function to all values in the cache. Not setting this is kind of\n silly, since that's the whole purpose of this lib, but it defaults\n to `Infinity`.\n* `maxAge` Maximum age in ms. Items are not pro-actively pruned out\n as they age, but if you try to get an item that is too old, it'll\n drop it and return undefined instead of giving it to you.\n* `length` Function that is used to calculate the length of stored\n items. If you're storing strings or buffers, then you probably want\n to do something like `function(n, key){return n.length}`. The default is\n `function(){return 1}`, which is fine if you want to store `max`\n like-sized things. They item is passed as the first argument, and\n the key is passed as the second argumnet.\n* `dispose` Function that is called on items when they are dropped\n from the cache. This can be handy if you want to close file\n descriptors or do other cleanup tasks when items are no longer\n accessible. Called with `key, value`. It's called *before*\n actually removing the item from the internal cache, so if you want\n to immediately put it back in, you'll have to do that in a\n `nextTick` or `setTimeout` callback or it won't do anything.\n* `stale` By default, if you set a `maxAge`, it'll only actually pull\n stale items out of the cache when you `get(key)`. (That is, it's\n not pre-emptively doing a `setTimeout` or anything.) If you set\n `stale:true`, it'll return the stale value before deleting it. If\n you don't set this, then it'll return `undefined` when you try to\n get a stale entry, as if it had already been deleted.\n\n## API\n\n* `set(key, value, maxAge)`\n* `get(key) => value`\n\n Both of these will update the \"recently used\"-ness of the key.\n They do what you think. `max` is optional and overrides the\n cache `max` option if provided.\n\n* `peek(key)`\n\n Returns the key value (or `undefined` if not found) without\n updating the \"recently used\"-ness of the key.\n\n (If you find yourself using this a lot, you *might* be using the\n wrong sort of data structure, but there are some use cases where\n it's handy.)\n\n* `del(key)`\n\n Deletes a key out of the cache.\n\n* `reset()`\n\n Clear the cache entirely, throwing away all values.\n\n* `has(key)`\n\n Check if a key is in the cache, without updating the recent-ness\n or deleting it for being stale.\n\n* `forEach(function(value,key,cache), [thisp])`\n\n Just like `Array.prototype.forEach`. Iterates over all the keys\n in the cache, in order of recent-ness. 
(Ie, more recently used\n items are iterated over first.)\n\n* `rforEach(function(value,key,cache), [thisp])`\n\n The same as `cache.forEach(...)` but items are iterated over in\n reverse order. (ie, less recently used items are iterated over\n first.)\n\n* `keys()`\n\n Return an array of the keys in the cache.\n\n* `values()`\n\n Return an array of the values in the cache.\n\n* `length()`\n\n Return total length of objects in cache taking into account\n `length` options function.\n\n* `itemCount`\n\n Return total quantity of objects currently in cache. Note, that\n `stale` (see options) items are returned as part of this item\n count.\n\n* `dump()`\n\n Return an array of the cache entries ready for serialization and usage\n with 'destinationCache.load(arr)`.\n\n* `load(cacheEntriesArray)`\n\n Loads another cache entries array, obtained with `sourceCache.dump()`,\n into the cache. The destination cache is reset before loading new entries\n",
- "readmeFilename": "README.md",
+ "files": [
+ "lib/lru-cache.js"
+ ],
+ "gitHead": "6cd8c8a43cf56c585bdb696faae94f9836cb9e28",
"bugs": {
"url": "https://github.com/isaacs/node-lru-cache/issues"
},
"homepage": "https://github.com/isaacs/node-lru-cache#readme",
- "_id": "lru-cache@3.2.0",
- "_shasum": "71789b3b7f5399bec8565dda38aa30d2a097efee",
- "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-3.2.0.tgz",
- "_from": "lru-cache@>=3.2.0 <3.3.0"
+ "_id": "lru-cache@4.0.1",
+ "_shasum": "1343955edaf2e37d9b9e7ee7241e27c4b9fb72be",
+ "_from": "lru-cache@4.0.1",
+ "_npmVersion": "3.7.3",
+ "_nodeVersion": "5.6.0",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "dist": {
+ "shasum": "1343955edaf2e37d9b9e7ee7241e27c4b9fb72be",
+ "tarball": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.1.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "isaacs@npmjs.com"
+ },
+ {
+ "name": "othiym23",
+ "email": "ogd@aoaioxxysz.net"
+ }
+ ],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/lru-cache-4.0.1.tgz_1458667372415_0.8005518841091543"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.1.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/lru-cache/test/basic.js b/deps/npm/node_modules/lru-cache/test/basic.js
deleted file mode 100644
index 02000a79593c31..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/basic.js
+++ /dev/null
@@ -1,427 +0,0 @@
-var test = require("tap").test
- , LRU = require("../")
-
-test("basic", function (t) {
- var cache = new LRU({max: 10})
- cache.set("key", "value")
- t.equal(cache.get("key"), "value")
- t.equal(cache.get("nada"), undefined)
- t.equal(cache.length, 1)
- t.equal(cache.max, 10)
- t.end()
-})
-
-test("least recently set", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("lru recently gotten", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.get("a")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("a"), "A")
- t.end()
-})
-
-test("del", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.del("a")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("max", function (t) {
- var cache = new LRU(3)
-
- // test changing the max, verify that the LRU items get dropped.
- cache.max = 100
- for (var i = 0; i < 100; i ++) cache.set(i, i)
- t.equal(cache.length, 100)
- for (var i = 0; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- cache.max = 3
- t.equal(cache.length, 3)
- for (var i = 0; i < 97; i ++) {
- t.equal(cache.get(i), undefined)
- }
- for (var i = 98; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
-
- // now remove the max restriction, and try again.
- cache.max = "hello"
- for (var i = 0; i < 100; i ++) cache.set(i, i)
- t.equal(cache.length, 100)
- for (var i = 0; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- // should trigger an immediate resize
- cache.max = 3
- t.equal(cache.length, 3)
- for (var i = 0; i < 97; i ++) {
- t.equal(cache.get(i), undefined)
- }
- for (var i = 98; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- t.end()
-})
-
-test("reset", function (t) {
- var cache = new LRU(10)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.reset()
- t.equal(cache.length, 0)
- t.equal(cache.max, 10)
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.end()
-})
-
-
-test("basic with weighed length", function (t) {
- var cache = new LRU({
- max: 100,
- length: function (item, key) {
- t.isa(key, 'string')
- return item.size
- }
- })
- cache.set("key", {val: "value", size: 50})
- t.equal(cache.get("key").val, "value")
- t.equal(cache.get("nada"), undefined)
- t.equal(cache.lengthCalculator(cache.get("key"), 'key'), 50)
- t.equal(cache.length, 50)
- t.equal(cache.max, 100)
- t.end()
-})
-
-
-test("weighed length item too large", function (t) {
- var cache = new LRU({
- max: 10,
- length: function (item) { return item.size }
- })
- t.equal(cache.max, 10)
-
- // should fall out immediately
- cache.set("key", {val: "value", size: 50})
-
- t.equal(cache.length, 0)
- t.equal(cache.get("key"), undefined)
- t.end()
-})
-
-test("least recently set with weighed length", function (t) {
- var cache = new LRU({
- max:8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- cache.set("d", "DDDD")
- t.equal(cache.get("d"), "DDDD")
- t.equal(cache.get("c"), "CCC")
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("lru recently gotten with weighed length", function (t) {
- var cache = new LRU({
- max: 8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- cache.get("a")
- cache.get("b")
- cache.set("d", "DDDD")
- t.equal(cache.get("c"), undefined)
- t.equal(cache.get("d"), "DDDD")
- t.equal(cache.get("b"), "BB")
- t.equal(cache.get("a"), "A")
- t.end()
-})
-
-test("lru recently updated with weighed length", function (t) {
- var cache = new LRU({
- max: 8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- t.equal(cache.length, 6) //CCC BB A
- cache.set("a", "+A")
- t.equal(cache.length, 7) //+A CCC BB
- cache.set("b", "++BB")
- t.equal(cache.length, 6) //++BB +A
- t.equal(cache.get("c"), undefined)
-
- cache.set("c", "oversized")
- t.equal(cache.length, 6) //++BB +A
- t.equal(cache.get("c"), undefined)
-
- cache.set("a", "oversized")
- t.equal(cache.length, 4) //++BB
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), "++BB")
- t.end()
-})
-
-test("set returns proper booleans", function(t) {
- var cache = new LRU({
- max: 5,
- length: function (item) { return item.length }
- })
-
- t.equal(cache.set("a", "A"), true)
-
- // should return false for max exceeded
- t.equal(cache.set("b", "donuts"), false)
-
- t.equal(cache.set("b", "B"), true)
- t.equal(cache.set("c", "CCCC"), true)
- t.end()
-})
-
-test("drop the old items", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- cache.set("a", "A")
-
- setTimeout(function () {
- cache.set("b", "b")
- t.equal(cache.get("a"), "A")
- }, 25)
-
- setTimeout(function () {
- cache.set("c", "C")
- // timed out
- t.notOk(cache.get("a"))
- }, 60 + 25)
-
- setTimeout(function () {
- t.notOk(cache.get("b"))
- t.equal(cache.get("c"), "C")
- }, 90)
-
- setTimeout(function () {
- t.notOk(cache.get("c"))
- t.end()
- }, 155)
-})
-
-test("individual item can have its own maxAge", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- cache.set("a", "A", 20)
- setTimeout(function () {
- t.notOk(cache.get("a"))
- t.end()
- }, 25)
-})
-
-test("individual item can have its own maxAge > cache's", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 20
- })
-
- cache.set("a", "A", 50)
- setTimeout(function () {
- t.equal(cache.get("a"), "A")
- t.end()
- }, 25)
-})
-
-test("disposal function", function(t) {
- var disposed = false
- var cache = new LRU({
- max: 1,
- dispose: function (k, n) {
- disposed = n
- }
- })
-
- cache.set(1, 1)
- cache.set(2, 2)
- t.equal(disposed, 1)
- cache.set(3, 3)
- t.equal(disposed, 2)
- cache.reset()
- t.equal(disposed, 3)
- t.end()
-})
-
-test("disposal function on too big of item", function(t) {
- var disposed = false
- var cache = new LRU({
- max: 1,
- length: function (k) {
- return k.length
- },
- dispose: function (k, n) {
- disposed = n
- }
- })
- var obj = [ 1, 2 ]
-
- t.equal(disposed, false)
- cache.set("obj", obj)
- t.equal(disposed, obj)
- t.end()
-})
-
-test("has()", function(t) {
- var cache = new LRU({
- max: 1,
- maxAge: 10
- })
-
- cache.set('foo', 'bar')
- t.equal(cache.has('foo'), true)
- cache.set('blu', 'baz')
- t.equal(cache.has('foo'), false)
- t.equal(cache.has('blu'), true)
- setTimeout(function() {
- t.equal(cache.has('blu'), false)
- t.end()
- }, 15)
-})
-
-test("stale", function(t) {
- var cache = new LRU({
- maxAge: 10,
- stale: true
- })
-
- cache.set('foo', 'bar')
- t.equal(cache.get('foo'), 'bar')
- t.equal(cache.has('foo'), true)
- setTimeout(function() {
- t.equal(cache.has('foo'), false)
- t.equal(cache.get('foo'), 'bar')
- t.equal(cache.get('foo'), undefined)
- t.end()
- }, 15)
-})
-
-test("lru update via set", function(t) {
- var cache = LRU({ max: 2 });
-
- cache.set('foo', 1);
- cache.set('bar', 2);
- cache.del('bar');
- cache.set('baz', 3);
- cache.set('qux', 4);
-
- t.equal(cache.get('foo'), undefined)
- t.equal(cache.get('bar'), undefined)
- t.equal(cache.get('baz'), 3)
- t.equal(cache.get('qux'), 4)
- t.end()
-})
-
-test("least recently set w/ peek", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- t.equal(cache.peek("a"), "A")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("pop the least used item", function (t) {
- var cache = new LRU(3)
- , last
-
- cache.set("a", "A")
- cache.set("b", "B")
- cache.set("c", "C")
-
- t.equal(cache.length, 3)
- t.equal(cache.max, 3)
-
- // Ensure we pop a, c, b
- cache.get("b", "B")
-
- last = cache.pop()
- t.equal(last.key, "a")
- t.equal(last.value, "A")
- t.equal(cache.length, 2)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last.key, "c")
- t.equal(last.value, "C")
- t.equal(cache.length, 1)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last.key, "b")
- t.equal(last.value, "B")
- t.equal(cache.length, 0)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last, null)
- t.equal(cache.length, 0)
- t.equal(cache.max, 3)
-
- t.end()
-})
-
-test("get and set only accepts strings and numbers as keys", function(t) {
- var cache = new LRU()
-
- cache.set("key", "value")
- cache.set(123, 456)
-
- t.equal(cache.get("key"), "value")
- t.equal(cache.get(123), 456)
-
- t.end()
-})
-
-test("peek with wierd keys", function(t) {
- var cache = new LRU()
-
- cache.set("key", "value")
- cache.set(123, 456)
-
- t.equal(cache.peek("key"), "value")
- t.equal(cache.peek(123), 456)
-
- t.equal(cache.peek({
- toString: function() { return "key" }
- }), undefined)
-
- t.end()
-})
diff --git a/deps/npm/node_modules/lru-cache/test/foreach.js b/deps/npm/node_modules/lru-cache/test/foreach.js
deleted file mode 100644
index d037d41c33af16..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/foreach.js
+++ /dev/null
@@ -1,130 +0,0 @@
-var test = require('tap').test
-var LRU = require('../')
-
-test('forEach', function (t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i, i.toString(2))
- }
-
- var i = 9
- l.forEach(function (val, key, cache) {
- t.equal(cache, l)
- t.equal(key, i)
- t.equal(val, i.toString(2))
- i -= 1
- })
-
- // get in order of most recently used
- l.get(6)
- l.get(8)
-
- var order = [ 8, 6, 9, 7, 5 ]
- var i = 0
-
- l.forEach(function (val, key, cache) {
- var j = order[i ++]
- t.equal(cache, l)
- t.equal(key, j)
- t.equal(val, j.toString(2))
- })
- t.equal(i, order.length)
-
- i = 0
- order.reverse()
- l.rforEach(function (val, key, cache) {
- var j = order[i ++]
- t.equal(cache, l)
- t.equal(key, j)
- t.equal(val, j.toString(2))
- })
- t.equal(i, order.length)
-
- t.end()
-})
-
-test('keys() and values()', function (t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i, i.toString(2))
- }
-
- t.similar(l.keys(), [9, 8, 7, 6, 5])
- t.similar(l.values(), ['1001', '1000', '111', '110', '101'])
-
- // get in order of most recently used
- l.get(6)
- l.get(8)
-
- t.similar(l.keys(), [8, 6, 9, 7, 5])
- t.similar(l.values(), ['1000', '110', '1001', '111', '101'])
-
- t.end()
-})
-
-test('all entries are iterated over', function(t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- var i = 0
- l.forEach(function (val, key, cache) {
- if (i > 0) {
- cache.del(key)
- }
- i += 1
- })
-
- t.equal(i, 5)
- t.equal(l.keys().length, 1)
-
- t.end()
-})
-
-test('all stale entries are removed', function(t) {
- var l = new LRU({ max: 5, maxAge: -5, stale: true })
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- var i = 0
- l.forEach(function () {
- i += 1
- })
-
- t.equal(i, 5)
- t.equal(l.keys().length, 0)
-
- t.end()
-})
-
-test('expires', function (t) {
- var l = new LRU({
- max: 10,
- maxAge: 50
- })
- for (var i = 0; i < 10; i++) {
- l.set(i.toString(), i.toString(2), ((i % 2) ? 25 : undefined))
- }
-
- var i = 0
- var order = [ 8, 6, 4, 2, 0 ]
- setTimeout(function () {
- l.forEach(function (val, key, cache) {
- var j = order[i++]
- t.equal(cache, l)
- t.equal(key, j.toString())
- t.equal(val, j.toString(2))
- })
- t.equal(i, order.length);
-
- setTimeout(function () {
- var count = 0;
- l.forEach(function (val, key, cache) { count++; })
- t.equal(0, count);
- t.end()
- }, 25)
-
- }, 26)
-})
diff --git a/deps/npm/node_modules/lru-cache/test/memory-leak.js b/deps/npm/node_modules/lru-cache/test/memory-leak.js
deleted file mode 100644
index b5912f6f168e5e..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/memory-leak.js
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env node --expose_gc
-
-
-var weak = require('weak');
-var test = require('tap').test
-var LRU = require('../')
-var l = new LRU({ max: 10 })
-var refs = 0
-function X() {
- refs ++
- weak(this, deref)
-}
-
-function deref() {
- refs --
-}
-
-test('no leaks', function (t) {
- // fill up the cache
- for (var i = 0; i < 100; i++) {
- l.set(i, new X);
- // throw some gets in there, too.
- if (i % 2 === 0)
- l.get(i / 2)
- }
-
- gc()
-
- var start = process.memoryUsage()
-
- // capture the memory
- var startRefs = refs
-
- // do it again, but more
- for (var i = 0; i < 10000; i++) {
- l.set(i, new X);
- // throw some gets in there, too.
- if (i % 2 === 0)
- l.get(i / 2)
- }
-
- gc()
-
- var end = process.memoryUsage()
- t.equal(refs, startRefs, 'no leaky refs')
-
- console.error('start: %j\n' +
- 'end: %j', start, end);
- t.pass();
- t.end();
-})
diff --git a/deps/npm/node_modules/lru-cache/test/overflow.js b/deps/npm/node_modules/lru-cache/test/overflow.js
deleted file mode 100644
index 238af699b0a0c0..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/overflow.js
+++ /dev/null
@@ -1,45 +0,0 @@
-var LRU = require('../')
-var t = require('tap')
-
-var c = new LRU(5)
-
-// now the hacksy bitses
-c._mru = Number.MAX_SAFE_INTEGER - 10
-
-function test (c) {
- t.test('mru=' + c._mru + ', lru=' + c._lru, function (t) {
- t.equal(c.length, 5)
- t.equal(c._cache.get(0), undefined)
- t.equal(c._cache.get(1).value, 1)
- t.equal(c._cache.get(2).value, 2)
- t.equal(c._cache.get(3).value, 3)
- t.equal(c._cache.get(4).value, 4)
- t.equal(c._cache.get(5).value, 5)
- t.ok(c._mru < Number.MAX_SAFE_INTEGER, 'did not overflow')
- t.end()
- })
-}
-
-for (var i = 0; i < 6; i++) {
- c.set(i, i)
-}
-
-test(c)
-
-for (var i = 0; i < 6; i++) {
- c.set(i, i)
-}
-
-test(c)
-
-for (var i = 0; i < 6; i++) {
- c.set(i, i)
-}
-
-test(c)
-
-for (var i = 0; i < 6; i++) {
- c.set(i, i)
-}
-
-test(c)
diff --git a/deps/npm/node_modules/lru-cache/test/serialize.js b/deps/npm/node_modules/lru-cache/test/serialize.js
deleted file mode 100644
index b87eb5d0b0383d..00000000000000
--- a/deps/npm/node_modules/lru-cache/test/serialize.js
+++ /dev/null
@@ -1,224 +0,0 @@
-var test = require('tap').test
-var LRU = require('../')
-
-test('dump', function (t) {
- var cache = new LRU()
-
- t.equal(cache.dump().length, 0, "nothing in dump for empty cache")
-
- cache.set("a", "A")
- cache.set("b", "B")
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 },
- { k: "a", v: "A", e: 0 }
- ])
-
- cache.set(123, 456)
- t.deepEqual(cache.dump(), [
- { k: 123, v: 456, e: 0 },
- { k: "b", v: "B", e: 0 },
- { k: "a", v: "A", e: 0 },
- ])
- cache.del(123)
-
- cache.set("a", "A");
- t.deepEqual(cache.dump(), [
- { k: "a", v: "A", e: 0 },
- { k: "b", v: "B", e: 0 }
- ])
-
- cache.get("b");
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 },
- { k: "a", v: "A", e: 0 }
- ])
-
- cache.del("a");
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 }
- ])
-
- t.end()
-})
-
-test("do not dump stale items", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- //expires at 50
- cache.set("a", "A")
-
- setTimeout(function () {
- //expires at 75
- cache.set("b", "B")
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "b")
- t.equal(s[1].k, "a")
- }, 25)
-
- setTimeout(function () {
- //expires at 110
- cache.set("c", "C")
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "c")
- t.equal(s[1].k, "b")
- }, 60)
-
- setTimeout(function () {
- //expires at 130
- cache.set("d", "D", 40)
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "d")
- t.equal(s[1].k, "c")
- }, 90)
-
- setTimeout(function () {
- var s = cache.dump()
- t.equal(s.length, 1)
- t.equal(s[0].k, "d")
- }, 120)
-
- setTimeout(function () {
- var s = cache.dump()
- t.deepEqual(s, [])
- t.end()
- }, 155)
-})
-
-test("load basic cache", function(t) {
- var cache = new LRU(),
- copy = new LRU()
-
- cache.set("a", "A")
- cache.set("b", "B")
- cache.set(123, 456)
-
- copy.load(cache.dump())
- t.deepEquals(cache.dump(), copy.dump())
-
- t.end()
-})
-
-
-test("load staled cache", function(t) {
- var cache = new LRU({maxAge: 50}),
- copy = new LRU({maxAge: 50}),
- arr
-
- //expires at 50
- cache.set("a", "A")
- setTimeout(function () {
- //expires at 80
- cache.set("b", "B")
- arr = cache.dump()
- t.equal(arr.length, 2)
- }, 30)
-
- setTimeout(function () {
- copy.load(arr)
- t.equal(copy.get("a"), undefined)
- t.equal(copy.get("b"), "B")
- }, 60)
-
- setTimeout(function () {
- t.equal(copy.get("b"), undefined)
- t.end()
- }, 90)
-})
-
-test("load to other size cache", function(t) {
- var cache = new LRU({max: 2}),
- copy = new LRU({max: 1})
-
- cache.set("a", "A")
- cache.set("b", "B")
-
- copy.load(cache.dump())
- t.equal(copy.get("a"), undefined)
- t.equal(copy.get("b"), "B")
-
- //update the last read from original cache
- cache.get("a")
- copy.load(cache.dump())
- t.equal(copy.get("a"), "A")
- t.equal(copy.get("b"), undefined)
-
- t.end()
-})
-
-
-test("load to other age cache", function(t) {
- var cache = new LRU({maxAge: 50}),
- aged = new LRU({maxAge: 100}),
- simple = new LRU(),
- arr,
- expired
-
- //created at 0
- //a would be valid till 0 + 50
- cache.set("a", "A")
- setTimeout(function () {
- //created at 20
- //b would be valid till 20 + 50
- cache.set("b", "B")
- //b would be valid till 20 + 70
- cache.set("c", "C", 70)
- arr = cache.dump()
- t.equal(arr.length, 3)
- }, 20)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("c"), "C")
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), "B")
- t.equal(aged.get("c"), "C")
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), "B")
- t.equal(simple.get("c"), "C")
- }, 60)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("c"), "C")
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), undefined)
- t.equal(aged.get("c"), "C")
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), undefined)
- t.equal(simple.get("c"), "C")
- }, 80)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("c"), undefined)
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), undefined)
- t.equal(aged.get("c"), undefined)
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), undefined)
- t.equal(simple.get("c"), undefined)
- t.end()
- }, 100)
-
-})
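The serialize.js tests deleted above exercise the dump()/load() round trip described in the old README. A minimal sketch of that round trip (illustrative only; the keys and sizes are made up):

```js
var LRU = require('lru-cache')

var source = LRU({ max: 10 })
source.set('a', 'A')
source.set('b', 'B')

// dump() lists entries most-recently-used first; e is the expiration time (0 when no maxAge)
var entries = source.dump() // [{ k: 'b', v: 'B', e: 0 }, { k: 'a', v: 'A', e: 0 }]

// load() resets the destination cache, then fills it from the dumped entries
var copy = LRU({ max: 10 })
copy.load(entries)

copy.get('a') // 'A'
copy.get('b') // 'B'
```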
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/README.md
deleted file mode 100644
index 33b447cd53e779..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# lodash.repeat v4.0.0
-
-The [lodash](https://lodash.com/) method `_.repeat` exported as a [Node.js](https://nodejs.org/) module.
-
-## Installation
-
-Using npm:
-```bash
-$ {sudo -H} npm i -g npm
-$ npm i --save lodash.repeat
-```
-
-In Node.js:
-```js
-var repeat = require('lodash.repeat');
-```
-
-See the [documentation](https://lodash.com/docs#repeat) or [package source](https://github.com/lodash/lodash/blob/4.0.0-npm-packages/lodash.repeat) for more details.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/index.js
deleted file mode 100644
index 7a54e85714c367..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/index.js
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * lodash 4.0.0 (Custom Build)
- * Build: `lodash modularize exports="npm" -o ./`
- * Copyright 2012-2016 The Dojo Foundation
- * Based on Underscore.js 1.8.3
- * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
- * Available under MIT license
- */
-var toString = require('lodash.tostring');
-
-/** Used as references for various `Number` constants. */
-var INFINITY = 1 / 0,
- MAX_SAFE_INTEGER = 9007199254740991,
- MAX_INTEGER = 1.7976931348623157e+308,
- NAN = 0 / 0;
-
-/** `Object#toString` result references. */
-var funcTag = '[object Function]',
- genTag = '[object GeneratorFunction]';
-
-/** Used to match leading and trailing whitespace. */
-var reTrim = /^\s+|\s+$/g;
-
-/** Used to detect bad signed hexadecimal string values. */
-var reIsBadHex = /^[-+]0x[0-9a-f]+$/i;
-
-/** Used to detect binary string values. */
-var reIsBinary = /^0b[01]+$/i;
-
-/** Used to detect octal string values. */
-var reIsOctal = /^0o[0-7]+$/i;
-
-/** Built-in method references without a dependency on `root`. */
-var freeParseInt = parseInt;
-
-/** Used for built-in method references. */
-var objectProto = Object.prototype;
-
-/**
- * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
- * of values.
- */
-var objectToString = objectProto.toString;
-
-/* Built-in method references for those with the same name as other `lodash` methods. */
-var nativeFloor = Math.floor;
-
-/**
- * Checks if `value` is classified as a `Function` object.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
- * @example
- *
- * _.isFunction(_);
- * // => true
- *
- * _.isFunction(/abc/);
- * // => false
- */
-function isFunction(value) {
- // The use of `Object#toString` avoids issues with the `typeof` operator
- // in Safari 8 which returns 'object' for typed array constructors, and
- // PhantomJS 1.9 which returns 'function' for `NodeList` instances.
- var tag = isObject(value) ? objectToString.call(value) : '';
- return tag == funcTag || tag == genTag;
-}
-
-/**
- * Checks if `value` is the [language type](https://es5.github.io/#x8) of `Object`.
- * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is an object, else `false`.
- * @example
- *
- * _.isObject({});
- * // => true
- *
- * _.isObject([1, 2, 3]);
- * // => true
- *
- * _.isObject(_.noop);
- * // => true
- *
- * _.isObject(null);
- * // => false
- */
-function isObject(value) {
- var type = typeof value;
- return !!value && (type == 'object' || type == 'function');
-}
-
-/**
- * Converts `value` to an integer.
- *
- * **Note:** This function is loosely based on [`ToInteger`](http://www.ecma-international.org/ecma-262/6.0/#sec-tointeger).
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to convert.
- * @returns {number} Returns the converted integer.
- * @example
- *
- * _.toInteger(3);
- * // => 3
- *
- * _.toInteger(Number.MIN_VALUE);
- * // => 0
- *
- * _.toInteger(Infinity);
- * // => 1.7976931348623157e+308
- *
- * _.toInteger('3');
- * // => 3
- */
-function toInteger(value) {
- if (!value) {
- return value === 0 ? value : 0;
- }
- value = toNumber(value);
- if (value === INFINITY || value === -INFINITY) {
- var sign = (value < 0 ? -1 : 1);
- return sign * MAX_INTEGER;
- }
- var remainder = value % 1;
- return value === value ? (remainder ? value - remainder : value) : 0;
-}
-
-/**
- * Converts `value` to a number.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to process.
- * @returns {number} Returns the number.
- * @example
- *
- * _.toNumber(3);
- * // => 3
- *
- * _.toNumber(Number.MIN_VALUE);
- * // => 5e-324
- *
- * _.toNumber(Infinity);
- * // => Infinity
- *
- * _.toNumber('3');
- * // => 3
- */
-function toNumber(value) {
- if (isObject(value)) {
- var other = isFunction(value.valueOf) ? value.valueOf() : value;
- value = isObject(other) ? (other + '') : other;
- }
- if (typeof value != 'string') {
- return value === 0 ? value : +value;
- }
- value = value.replace(reTrim, '');
- var isBinary = reIsBinary.test(value);
- return (isBinary || reIsOctal.test(value))
- ? freeParseInt(value.slice(2), isBinary ? 2 : 8)
- : (reIsBadHex.test(value) ? NAN : +value);
-}
-
-/**
- * Repeats the given string `n` times.
- *
- * @static
- * @memberOf _
- * @category String
- * @param {string} [string=''] The string to repeat.
- * @param {number} [n=0] The number of times to repeat the string.
- * @returns {string} Returns the repeated string.
- * @example
- *
- * _.repeat('*', 3);
- * // => '***'
- *
- * _.repeat('abc', 2);
- * // => 'abcabc'
- *
- * _.repeat('abc', 0);
- * // => ''
- */
-function repeat(string, n) {
- string = toString(string);
- n = toInteger(n);
-
- var result = '';
- if (!string || n < 1 || n > MAX_SAFE_INTEGER) {
- return result;
- }
- // Leverage the exponentiation by squaring algorithm for a faster repeat.
- // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details.
- do {
- if (n % 2) {
- result += string;
- }
- n = nativeFloor(n / 2);
- string += string;
- } while (n);
-
- return result;
-}
-
-module.exports = repeat;
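The repeat() implementation deleted above relies on exponentiation by squaring: rather than performing n concatenations, it repeatedly doubles the string and keeps the chunks selected by the binary digits of n. A standalone sketch of the same idea (the function name is made up for illustration):

```js
function repeatBySquaring (s, n) {
  var result = ''
  while (n > 0) {
    if (n % 2) result += s // this binary digit of n is set: take the current chunk
    n = Math.floor(n / 2)
    s += s                 // double the chunk for the next digit
  }
  return result
}

repeatBySquaring('ab', 3) // 'ababab', built with O(log n) concatenations
```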
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/package.json
deleted file mode 100644
index a3292eb7f1bf87..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
- "name": "lodash.repeat",
- "version": "4.0.0",
- "description": "The lodash method `_.repeat` exported as a module.",
- "homepage": "https://lodash.com/",
- "icon": "https://lodash.com/icon.svg",
- "license": "MIT",
- "keywords": [
- "lodash-modularized",
- "repeat"
- ],
- "author": {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- "contributors": [
- {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- {
- "name": "Blaine Bublitz",
- "email": "blaine@iceddev.com",
- "url": "https://github.com/phated"
- },
- {
- "name": "Mathias Bynens",
- "email": "mathias@qiwi.be",
- "url": "https://mathiasbynens.be/"
- }
- ],
- "repository": {
- "type": "git",
- "url": "git+https://github.com/lodash/lodash.git"
- },
- "scripts": {
- "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\""
- },
- "dependencies": {
- "lodash.tostring": "^4.0.0"
- },
- "bugs": {
- "url": "https://github.com/lodash/lodash/issues"
- },
- "_id": "lodash.repeat@4.0.0",
- "_shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "_from": "lodash.repeat@>=4.0.0 <5.0.0",
- "_npmVersion": "2.14.18",
- "_nodeVersion": "5.5.0",
- "_npmUser": {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- "dist": {
- "shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "tarball": "http://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
- },
- "maintainers": [
- {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- {
- "name": "mathias",
- "email": "mathias@qiwi.be"
- },
- {
- "name": "phated",
- "email": "blaine@iceddev.com"
- }
- ],
- "_npmOperationalInternal": {
- "host": "packages-9-west.internal.npmjs.com",
- "tmp": "tmp/lodash.repeat-4.0.0.tgz_1455602646227_0.7481637196615338"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
-}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/LICENSE
deleted file mode 100644
index bcbe13d67a9621..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/LICENSE
+++ /dev/null
@@ -1,23 +0,0 @@
-The MIT License (MIT)
-
-Copyright 2012-2016 The Dojo Foundation
-Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
-DocumentCloud and Investigative Reporters & Editors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/package.json
deleted file mode 100644
index a6a8dbcfce201f..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/package.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
- "name": "lodash.tostring",
- "version": "4.1.2",
- "description": "The lodash method `_.toString` exported as a module.",
- "homepage": "https://lodash.com/",
- "icon": "https://lodash.com/icon.svg",
- "license": "MIT",
- "keywords": [
- "lodash-modularized",
- "tostring"
- ],
- "author": {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- "contributors": [
- {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- {
- "name": "Blaine Bublitz",
- "email": "blaine.bublitz@gmail.com",
- "url": "https://github.com/phated"
- },
- {
- "name": "Mathias Bynens",
- "email": "mathias@qiwi.be",
- "url": "https://mathiasbynens.be/"
- }
- ],
- "repository": {
- "type": "git",
- "url": "git+https://github.com/lodash/lodash.git"
- },
- "scripts": {
- "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\""
- },
- "bugs": {
- "url": "https://github.com/lodash/lodash/issues"
- },
- "_id": "lodash.tostring@4.1.2",
- "_shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "_from": "lodash.tostring@>=4.0.0 <5.0.0",
- "_npmVersion": "2.14.17",
- "_nodeVersion": "5.5.0",
- "_npmUser": {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- "dist": {
- "shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "tarball": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
- },
- "maintainers": [
- {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- {
- "name": "mathias",
- "email": "mathias@qiwi.be"
- },
- {
- "name": "phated",
- "email": "blaine.bublitz@gmail.com"
- }
- ],
- "_npmOperationalInternal": {
- "host": "packages-13-west.internal.npmjs.com",
- "tmp": "tmp/lodash.tostring-4.1.2.tgz_1456896853027_0.8195764778647572"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
-}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/README.md
deleted file mode 100644
index 33b447cd53e779..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# lodash.repeat v4.0.0
-
-The [lodash](https://lodash.com/) method `_.repeat` exported as a [Node.js](https://nodejs.org/) module.
-
-## Installation
-
-Using npm:
-```bash
-$ {sudo -H} npm i -g npm
-$ npm i --save lodash.repeat
-```
-
-In Node.js:
-```js
-var repeat = require('lodash.repeat');
-```
-
-See the [documentation](https://lodash.com/docs#repeat) or [package source](https://github.com/lodash/lodash/blob/4.0.0-npm-packages/lodash.repeat) for more details.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/index.js
deleted file mode 100644
index 7a54e85714c367..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/index.js
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * lodash 4.0.0 (Custom Build)
- * Build: `lodash modularize exports="npm" -o ./`
- * Copyright 2012-2016 The Dojo Foundation
- * Based on Underscore.js 1.8.3
- * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
- * Available under MIT license
- */
-var toString = require('lodash.tostring');
-
-/** Used as references for various `Number` constants. */
-var INFINITY = 1 / 0,
- MAX_SAFE_INTEGER = 9007199254740991,
- MAX_INTEGER = 1.7976931348623157e+308,
- NAN = 0 / 0;
-
-/** `Object#toString` result references. */
-var funcTag = '[object Function]',
- genTag = '[object GeneratorFunction]';
-
-/** Used to match leading and trailing whitespace. */
-var reTrim = /^\s+|\s+$/g;
-
-/** Used to detect bad signed hexadecimal string values. */
-var reIsBadHex = /^[-+]0x[0-9a-f]+$/i;
-
-/** Used to detect binary string values. */
-var reIsBinary = /^0b[01]+$/i;
-
-/** Used to detect octal string values. */
-var reIsOctal = /^0o[0-7]+$/i;
-
-/** Built-in method references without a dependency on `root`. */
-var freeParseInt = parseInt;
-
-/** Used for built-in method references. */
-var objectProto = Object.prototype;
-
-/**
- * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
- * of values.
- */
-var objectToString = objectProto.toString;
-
-/* Built-in method references for those with the same name as other `lodash` methods. */
-var nativeFloor = Math.floor;
-
-/**
- * Checks if `value` is classified as a `Function` object.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
- * @example
- *
- * _.isFunction(_);
- * // => true
- *
- * _.isFunction(/abc/);
- * // => false
- */
-function isFunction(value) {
- // The use of `Object#toString` avoids issues with the `typeof` operator
- // in Safari 8 which returns 'object' for typed array constructors, and
- // PhantomJS 1.9 which returns 'function' for `NodeList` instances.
- var tag = isObject(value) ? objectToString.call(value) : '';
- return tag == funcTag || tag == genTag;
-}
-
-/**
- * Checks if `value` is the [language type](https://es5.github.io/#x8) of `Object`.
- * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is an object, else `false`.
- * @example
- *
- * _.isObject({});
- * // => true
- *
- * _.isObject([1, 2, 3]);
- * // => true
- *
- * _.isObject(_.noop);
- * // => true
- *
- * _.isObject(null);
- * // => false
- */
-function isObject(value) {
- var type = typeof value;
- return !!value && (type == 'object' || type == 'function');
-}
-
-/**
- * Converts `value` to an integer.
- *
- * **Note:** This function is loosely based on [`ToInteger`](http://www.ecma-international.org/ecma-262/6.0/#sec-tointeger).
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to convert.
- * @returns {number} Returns the converted integer.
- * @example
- *
- * _.toInteger(3);
- * // => 3
- *
- * _.toInteger(Number.MIN_VALUE);
- * // => 0
- *
- * _.toInteger(Infinity);
- * // => 1.7976931348623157e+308
- *
- * _.toInteger('3');
- * // => 3
- */
-function toInteger(value) {
- if (!value) {
- return value === 0 ? value : 0;
- }
- value = toNumber(value);
- if (value === INFINITY || value === -INFINITY) {
- var sign = (value < 0 ? -1 : 1);
- return sign * MAX_INTEGER;
- }
- var remainder = value % 1;
- return value === value ? (remainder ? value - remainder : value) : 0;
-}
-
-/**
- * Converts `value` to a number.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to process.
- * @returns {number} Returns the number.
- * @example
- *
- * _.toNumber(3);
- * // => 3
- *
- * _.toNumber(Number.MIN_VALUE);
- * // => 5e-324
- *
- * _.toNumber(Infinity);
- * // => Infinity
- *
- * _.toNumber('3');
- * // => 3
- */
-function toNumber(value) {
- if (isObject(value)) {
- var other = isFunction(value.valueOf) ? value.valueOf() : value;
- value = isObject(other) ? (other + '') : other;
- }
- if (typeof value != 'string') {
- return value === 0 ? value : +value;
- }
- value = value.replace(reTrim, '');
- var isBinary = reIsBinary.test(value);
- return (isBinary || reIsOctal.test(value))
- ? freeParseInt(value.slice(2), isBinary ? 2 : 8)
- : (reIsBadHex.test(value) ? NAN : +value);
-}
-
-/**
- * Repeats the given string `n` times.
- *
- * @static
- * @memberOf _
- * @category String
- * @param {string} [string=''] The string to repeat.
- * @param {number} [n=0] The number of times to repeat the string.
- * @returns {string} Returns the repeated string.
- * @example
- *
- * _.repeat('*', 3);
- * // => '***'
- *
- * _.repeat('abc', 2);
- * // => 'abcabc'
- *
- * _.repeat('abc', 0);
- * // => ''
- */
-function repeat(string, n) {
- string = toString(string);
- n = toInteger(n);
-
- var result = '';
- if (!string || n < 1 || n > MAX_SAFE_INTEGER) {
- return result;
- }
- // Leverage the exponentiation by squaring algorithm for a faster repeat.
- // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details.
- do {
- if (n % 2) {
- result += string;
- }
- n = nativeFloor(n / 2);
- string += string;
- } while (n);
-
- return result;
-}
-
-module.exports = repeat;
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/package.json
deleted file mode 100644
index a3292eb7f1bf87..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
- "name": "lodash.repeat",
- "version": "4.0.0",
- "description": "The lodash method `_.repeat` exported as a module.",
- "homepage": "https://lodash.com/",
- "icon": "https://lodash.com/icon.svg",
- "license": "MIT",
- "keywords": [
- "lodash-modularized",
- "repeat"
- ],
- "author": {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- "contributors": [
- {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- {
- "name": "Blaine Bublitz",
- "email": "blaine@iceddev.com",
- "url": "https://github.com/phated"
- },
- {
- "name": "Mathias Bynens",
- "email": "mathias@qiwi.be",
- "url": "https://mathiasbynens.be/"
- }
- ],
- "repository": {
- "type": "git",
- "url": "git+https://github.com/lodash/lodash.git"
- },
- "scripts": {
- "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\""
- },
- "dependencies": {
- "lodash.tostring": "^4.0.0"
- },
- "bugs": {
- "url": "https://github.com/lodash/lodash/issues"
- },
- "_id": "lodash.repeat@4.0.0",
- "_shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "_from": "lodash.repeat@>=4.0.0 <5.0.0",
- "_npmVersion": "2.14.18",
- "_nodeVersion": "5.5.0",
- "_npmUser": {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- "dist": {
- "shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "tarball": "http://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
- },
- "maintainers": [
- {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- {
- "name": "mathias",
- "email": "mathias@qiwi.be"
- },
- {
- "name": "phated",
- "email": "blaine@iceddev.com"
- }
- ],
- "_npmOperationalInternal": {
- "host": "packages-9-west.internal.npmjs.com",
- "tmp": "tmp/lodash.repeat-4.0.0.tgz_1455602646227_0.7481637196615338"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
-}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/LICENSE
deleted file mode 100644
index bcbe13d67a9621..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/LICENSE
+++ /dev/null
@@ -1,23 +0,0 @@
-The MIT License (MIT)
-
-Copyright 2012-2016 The Dojo Foundation
-Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
-DocumentCloud and Investigative Reporters & Editors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/README.md
deleted file mode 100644
index b3858fd68e7866..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# lodash.tostring v4.1.2
-
-The [lodash](https://lodash.com/) method `_.toString` exported as a [Node.js](https://nodejs.org/) module.
-
-## Installation
-
-Using npm:
-```bash
-$ {sudo -H} npm i -g npm
-$ npm i --save lodash.tostring
-```
-
-In Node.js:
-```js
-var toString = require('lodash.tostring');
-```
-
-See the [documentation](https://lodash.com/docs#toString) or [package source](https://github.com/lodash/lodash/blob/4.1.2-npm-packages/lodash.tostring) for more details.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/index.js
deleted file mode 100644
index 5c0e3e2b987eb6..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/index.js
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * lodash 4.1.2 (Custom Build)
- * Build: `lodash modularize exports="npm" -o ./`
- * Copyright 2012-2016 The Dojo Foundation
- * Based on Underscore.js 1.8.3
- * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
- * Available under MIT license
- */
-
-/** Used as references for various `Number` constants. */
-var INFINITY = 1 / 0;
-
-/** `Object#toString` result references. */
-var symbolTag = '[object Symbol]';
-
-/** Used to determine if values are of the language type `Object`. */
-var objectTypes = {
- 'function': true,
- 'object': true
-};
-
-/** Detect free variable `exports`. */
-var freeExports = (objectTypes[typeof exports] && exports && !exports.nodeType)
- ? exports
- : undefined;
-
-/** Detect free variable `module`. */
-var freeModule = (objectTypes[typeof module] && module && !module.nodeType)
- ? module
- : undefined;
-
-/** Detect free variable `global` from Node.js. */
-var freeGlobal = checkGlobal(freeExports && freeModule && typeof global == 'object' && global);
-
-/** Detect free variable `self`. */
-var freeSelf = checkGlobal(objectTypes[typeof self] && self);
-
-/** Detect free variable `window`. */
-var freeWindow = checkGlobal(objectTypes[typeof window] && window);
-
-/** Detect `this` as the global object. */
-var thisGlobal = checkGlobal(objectTypes[typeof this] && this);
-
-/**
- * Used as a reference to the global object.
- *
- * The `this` value is used if it's the global object to avoid Greasemonkey's
- * restricted `window` object, otherwise the `window` object is used.
- */
-var root = freeGlobal ||
- ((freeWindow !== (thisGlobal && thisGlobal.window)) && freeWindow) ||
- freeSelf || thisGlobal || Function('return this')();
-
-/**
- * Checks if `value` is a global object.
- *
- * @private
- * @param {*} value The value to check.
- * @returns {null|Object} Returns `value` if it's a global object, else `null`.
- */
-function checkGlobal(value) {
- return (value && value.Object === Object) ? value : null;
-}
-
-/** Used for built-in method references. */
-var objectProto = Object.prototype;
-
-/**
- * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
- * of values.
- */
-var objectToString = objectProto.toString;
-
-/** Built-in value references. */
-var Symbol = root.Symbol;
-
-/** Used to convert symbols to primitives and strings. */
-var symbolProto = Symbol ? Symbol.prototype : undefined,
- symbolToString = symbolProto ? symbolProto.toString : undefined;
-
-/**
- * Checks if `value` is object-like. A value is object-like if it's not `null`
- * and has a `typeof` result of "object".
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
- * @example
- *
- * _.isObjectLike({});
- * // => true
- *
- * _.isObjectLike([1, 2, 3]);
- * // => true
- *
- * _.isObjectLike(_.noop);
- * // => false
- *
- * _.isObjectLike(null);
- * // => false
- */
-function isObjectLike(value) {
- return !!value && typeof value == 'object';
-}
-
-/**
- * Checks if `value` is classified as a `Symbol` primitive or object.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
- * @example
- *
- * _.isSymbol(Symbol.iterator);
- * // => true
- *
- * _.isSymbol('abc');
- * // => false
- */
-function isSymbol(value) {
- return typeof value == 'symbol' ||
- (isObjectLike(value) && objectToString.call(value) == symbolTag);
-}
-
-/**
- * Converts `value` to a string if it's not one. An empty string is returned
- * for `null` and `undefined` values. The sign of `-0` is preserved.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to process.
- * @returns {string} Returns the string.
- * @example
- *
- * _.toString(null);
- * // => ''
- *
- * _.toString(-0);
- * // => '-0'
- *
- * _.toString([1, 2, 3]);
- * // => '1,2,3'
- */
-function toString(value) {
- // Exit early for strings to avoid a performance hit in some environments.
- if (typeof value == 'string') {
- return value;
- }
- if (value == null) {
- return '';
- }
- if (isSymbol(value)) {
- return symbolToString ? symbolToString.call(value) : '';
- }
- var result = (value + '');
- return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
-}
-
-module.exports = toString;
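The lodash.tostring module deleted above converts arbitrary values to strings while treating null/undefined, symbols, and -0 specially; the expected results in this sketch come from the module's own JSDoc examples:

```js
var toString = require('lodash.tostring')

toString(null)      // ''    -- null and undefined become empty strings
toString(-0)        // '-0'  -- the sign of -0 is preserved
toString([1, 2, 3]) // '1,2,3'
```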
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/LICENSE
deleted file mode 100644
index bcbe13d67a9621..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/LICENSE
+++ /dev/null
@@ -1,23 +0,0 @@
-The MIT License (MIT)
-
-Copyright 2012-2016 The Dojo Foundation
-Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
-DocumentCloud and Investigative Reporters & Editors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/index.js
deleted file mode 100644
index 7a54e85714c367..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/index.js
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * lodash 4.0.0 (Custom Build)
- * Build: `lodash modularize exports="npm" -o ./`
- * Copyright 2012-2016 The Dojo Foundation
- * Based on Underscore.js 1.8.3
- * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
- * Available under MIT license
- */
-var toString = require('lodash.tostring');
-
-/** Used as references for various `Number` constants. */
-var INFINITY = 1 / 0,
- MAX_SAFE_INTEGER = 9007199254740991,
- MAX_INTEGER = 1.7976931348623157e+308,
- NAN = 0 / 0;
-
-/** `Object#toString` result references. */
-var funcTag = '[object Function]',
- genTag = '[object GeneratorFunction]';
-
-/** Used to match leading and trailing whitespace. */
-var reTrim = /^\s+|\s+$/g;
-
-/** Used to detect bad signed hexadecimal string values. */
-var reIsBadHex = /^[-+]0x[0-9a-f]+$/i;
-
-/** Used to detect binary string values. */
-var reIsBinary = /^0b[01]+$/i;
-
-/** Used to detect octal string values. */
-var reIsOctal = /^0o[0-7]+$/i;
-
-/** Built-in method references without a dependency on `root`. */
-var freeParseInt = parseInt;
-
-/** Used for built-in method references. */
-var objectProto = Object.prototype;
-
-/**
- * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
- * of values.
- */
-var objectToString = objectProto.toString;
-
-/* Built-in method references for those with the same name as other `lodash` methods. */
-var nativeFloor = Math.floor;
-
-/**
- * Checks if `value` is classified as a `Function` object.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
- * @example
- *
- * _.isFunction(_);
- * // => true
- *
- * _.isFunction(/abc/);
- * // => false
- */
-function isFunction(value) {
- // The use of `Object#toString` avoids issues with the `typeof` operator
- // in Safari 8 which returns 'object' for typed array constructors, and
- // PhantomJS 1.9 which returns 'function' for `NodeList` instances.
- var tag = isObject(value) ? objectToString.call(value) : '';
- return tag == funcTag || tag == genTag;
-}
-
-/**
- * Checks if `value` is the [language type](https://es5.github.io/#x8) of `Object`.
- * (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is an object, else `false`.
- * @example
- *
- * _.isObject({});
- * // => true
- *
- * _.isObject([1, 2, 3]);
- * // => true
- *
- * _.isObject(_.noop);
- * // => true
- *
- * _.isObject(null);
- * // => false
- */
-function isObject(value) {
- var type = typeof value;
- return !!value && (type == 'object' || type == 'function');
-}
-
-/**
- * Converts `value` to an integer.
- *
- * **Note:** This function is loosely based on [`ToInteger`](http://www.ecma-international.org/ecma-262/6.0/#sec-tointeger).
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to convert.
- * @returns {number} Returns the converted integer.
- * @example
- *
- * _.toInteger(3);
- * // => 3
- *
- * _.toInteger(Number.MIN_VALUE);
- * // => 0
- *
- * _.toInteger(Infinity);
- * // => 1.7976931348623157e+308
- *
- * _.toInteger('3');
- * // => 3
- */
-function toInteger(value) {
- if (!value) {
- return value === 0 ? value : 0;
- }
- value = toNumber(value);
- if (value === INFINITY || value === -INFINITY) {
- var sign = (value < 0 ? -1 : 1);
- return sign * MAX_INTEGER;
- }
- var remainder = value % 1;
- return value === value ? (remainder ? value - remainder : value) : 0;
-}
-
-/**
- * Converts `value` to a number.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to process.
- * @returns {number} Returns the number.
- * @example
- *
- * _.toNumber(3);
- * // => 3
- *
- * _.toNumber(Number.MIN_VALUE);
- * // => 5e-324
- *
- * _.toNumber(Infinity);
- * // => Infinity
- *
- * _.toNumber('3');
- * // => 3
- */
-function toNumber(value) {
- if (isObject(value)) {
- var other = isFunction(value.valueOf) ? value.valueOf() : value;
- value = isObject(other) ? (other + '') : other;
- }
- if (typeof value != 'string') {
- return value === 0 ? value : +value;
- }
- value = value.replace(reTrim, '');
- var isBinary = reIsBinary.test(value);
- return (isBinary || reIsOctal.test(value))
- ? freeParseInt(value.slice(2), isBinary ? 2 : 8)
- : (reIsBadHex.test(value) ? NAN : +value);
-}
-
-/**
- * Repeats the given string `n` times.
- *
- * @static
- * @memberOf _
- * @category String
- * @param {string} [string=''] The string to repeat.
- * @param {number} [n=0] The number of times to repeat the string.
- * @returns {string} Returns the repeated string.
- * @example
- *
- * _.repeat('*', 3);
- * // => '***'
- *
- * _.repeat('abc', 2);
- * // => 'abcabc'
- *
- * _.repeat('abc', 0);
- * // => ''
- */
-function repeat(string, n) {
- string = toString(string);
- n = toInteger(n);
-
- var result = '';
- if (!string || n < 1 || n > MAX_SAFE_INTEGER) {
- return result;
- }
- // Leverage the exponentiation by squaring algorithm for a faster repeat.
- // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details.
- do {
- if (n % 2) {
- result += string;
- }
- n = nativeFloor(n / 2);
- string += string;
- } while (n);
-
- return result;
-}
-
-module.exports = repeat;
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/LICENSE
deleted file mode 100644
index bcbe13d67a9621..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/LICENSE
+++ /dev/null
@@ -1,23 +0,0 @@
-The MIT License (MIT)
-
-Copyright 2012-2016 The Dojo Foundation
-Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
-DocumentCloud and Investigative Reporters & Editors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/README.md
deleted file mode 100644
index b3858fd68e7866..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# lodash.tostring v4.1.2
-
-The [lodash](https://lodash.com/) method `_.toString` exported as a [Node.js](https://nodejs.org/) module.
-
-## Installation
-
-Using npm:
-```bash
-$ {sudo -H} npm i -g npm
-$ npm i --save lodash.tostring
-```
-
-In Node.js:
-```js
-var toString = require('lodash.tostring');
-```
-
-See the [documentation](https://lodash.com/docs#toString) or [package source](https://github.com/lodash/lodash/blob/4.1.2-npm-packages/lodash.tostring) for more details.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/index.js
deleted file mode 100644
index 5c0e3e2b987eb6..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/index.js
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * lodash 4.1.2 (Custom Build)
- * Build: `lodash modularize exports="npm" -o ./`
- * Copyright 2012-2016 The Dojo Foundation
- * Based on Underscore.js 1.8.3
- * Copyright 2009-2016 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
- * Available under MIT license
- */
-
-/** Used as references for various `Number` constants. */
-var INFINITY = 1 / 0;
-
-/** `Object#toString` result references. */
-var symbolTag = '[object Symbol]';
-
-/** Used to determine if values are of the language type `Object`. */
-var objectTypes = {
- 'function': true,
- 'object': true
-};
-
-/** Detect free variable `exports`. */
-var freeExports = (objectTypes[typeof exports] && exports && !exports.nodeType)
- ? exports
- : undefined;
-
-/** Detect free variable `module`. */
-var freeModule = (objectTypes[typeof module] && module && !module.nodeType)
- ? module
- : undefined;
-
-/** Detect free variable `global` from Node.js. */
-var freeGlobal = checkGlobal(freeExports && freeModule && typeof global == 'object' && global);
-
-/** Detect free variable `self`. */
-var freeSelf = checkGlobal(objectTypes[typeof self] && self);
-
-/** Detect free variable `window`. */
-var freeWindow = checkGlobal(objectTypes[typeof window] && window);
-
-/** Detect `this` as the global object. */
-var thisGlobal = checkGlobal(objectTypes[typeof this] && this);
-
-/**
- * Used as a reference to the global object.
- *
- * The `this` value is used if it's the global object to avoid Greasemonkey's
- * restricted `window` object, otherwise the `window` object is used.
- */
-var root = freeGlobal ||
- ((freeWindow !== (thisGlobal && thisGlobal.window)) && freeWindow) ||
- freeSelf || thisGlobal || Function('return this')();
-
-/**
- * Checks if `value` is a global object.
- *
- * @private
- * @param {*} value The value to check.
- * @returns {null|Object} Returns `value` if it's a global object, else `null`.
- */
-function checkGlobal(value) {
- return (value && value.Object === Object) ? value : null;
-}
-
-/** Used for built-in method references. */
-var objectProto = Object.prototype;
-
-/**
- * Used to resolve the [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
- * of values.
- */
-var objectToString = objectProto.toString;
-
-/** Built-in value references. */
-var Symbol = root.Symbol;
-
-/** Used to convert symbols to primitives and strings. */
-var symbolProto = Symbol ? Symbol.prototype : undefined,
- symbolToString = symbolProto ? symbolProto.toString : undefined;
-
-/**
- * Checks if `value` is object-like. A value is object-like if it's not `null`
- * and has a `typeof` result of "object".
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
- * @example
- *
- * _.isObjectLike({});
- * // => true
- *
- * _.isObjectLike([1, 2, 3]);
- * // => true
- *
- * _.isObjectLike(_.noop);
- * // => false
- *
- * _.isObjectLike(null);
- * // => false
- */
-function isObjectLike(value) {
- return !!value && typeof value == 'object';
-}
-
-/**
- * Checks if `value` is classified as a `Symbol` primitive or object.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to check.
- * @returns {boolean} Returns `true` if `value` is correctly classified, else `false`.
- * @example
- *
- * _.isSymbol(Symbol.iterator);
- * // => true
- *
- * _.isSymbol('abc');
- * // => false
- */
-function isSymbol(value) {
- return typeof value == 'symbol' ||
- (isObjectLike(value) && objectToString.call(value) == symbolTag);
-}
-
-/**
- * Converts `value` to a string if it's not one. An empty string is returned
- * for `null` and `undefined` values. The sign of `-0` is preserved.
- *
- * @static
- * @memberOf _
- * @category Lang
- * @param {*} value The value to process.
- * @returns {string} Returns the string.
- * @example
- *
- * _.toString(null);
- * // => ''
- *
- * _.toString(-0);
- * // => '-0'
- *
- * _.toString([1, 2, 3]);
- * // => '1,2,3'
- */
-function toString(value) {
- // Exit early for strings to avoid a performance hit in some environments.
- if (typeof value == 'string') {
- return value;
- }
- if (value == null) {
- return '';
- }
- if (isSymbol(value)) {
- return symbolToString ? symbolToString.call(value) : '';
- }
- var result = (value + '');
- return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
-}
-
-module.exports = toString;
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/package.json
deleted file mode 100644
index a6a8dbcfce201f..00000000000000
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.tostring/package.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
- "name": "lodash.tostring",
- "version": "4.1.2",
- "description": "The lodash method `_.toString` exported as a module.",
- "homepage": "https://lodash.com/",
- "icon": "https://lodash.com/icon.svg",
- "license": "MIT",
- "keywords": [
- "lodash-modularized",
- "tostring"
- ],
- "author": {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- "contributors": [
- {
- "name": "John-David Dalton",
- "email": "john.david.dalton@gmail.com",
- "url": "http://allyoucanleet.com/"
- },
- {
- "name": "Blaine Bublitz",
- "email": "blaine.bublitz@gmail.com",
- "url": "https://github.com/phated"
- },
- {
- "name": "Mathias Bynens",
- "email": "mathias@qiwi.be",
- "url": "https://mathiasbynens.be/"
- }
- ],
- "repository": {
- "type": "git",
- "url": "git+https://github.com/lodash/lodash.git"
- },
- "scripts": {
- "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\""
- },
- "bugs": {
- "url": "https://github.com/lodash/lodash/issues"
- },
- "_id": "lodash.tostring@4.1.2",
- "_shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "_from": "lodash.tostring@>=4.0.0 <5.0.0",
- "_npmVersion": "2.14.17",
- "_nodeVersion": "5.5.0",
- "_npmUser": {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- "dist": {
- "shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "tarball": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
- },
- "maintainers": [
- {
- "name": "jdalton",
- "email": "john.david.dalton@gmail.com"
- },
- {
- "name": "mathias",
- "email": "mathias@qiwi.be"
- },
- {
- "name": "phated",
- "email": "blaine.bublitz@gmail.com"
- }
- ],
- "_npmOperationalInternal": {
- "host": "packages-13-west.internal.npmjs.com",
- "tmp": "tmp/lodash.tostring-4.1.2.tgz_1456896853027_0.8195764778647572"
- },
- "directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
-}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/LICENSE
new file mode 100644
index 00000000000000..e0c69d56032d15
--- /dev/null
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/LICENSE
@@ -0,0 +1,47 @@
+Copyright jQuery Foundation and other contributors
+
+Based on Underscore.js, copyright Jeremy Ashkenas,
+DocumentCloud and Investigative Reporters & Editors
+
+This software consists of voluntary contributions made by many
+individuals. For exact contribution history, see the revision history
+available at https://github.com/lodash/lodash
+
+The following license applies to all parts of this software except as
+documented below:
+
+====
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+====
+
+Copyright and related rights for sample code are waived via CC0. Sample
+code is defined as all source code displayed within the prose of the
+documentation.
+
+CC0: http://creativecommons.org/publicdomain/zero/1.0/
+
+====
+
+Files located in the node_modules and vendor directories are externally
+maintained libraries used by this software which have their own
+licenses; we recommend you read them, as their terms may differ from the
+terms above.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/README.md
similarity index 80%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/README.md
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/README.md
index 33b447cd53e779..9d626e83f8c191 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/README.md
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/README.md
@@ -1,4 +1,4 @@
-# lodash.repeat v4.0.0
+# lodash.repeat v4.0.2
The [lodash](https://lodash.com/) method `_.repeat` exported as a [Node.js](https://nodejs.org/) module.
@@ -15,4 +15,4 @@ In Node.js:
var repeat = require('lodash.repeat');
```
-See the [documentation](https://lodash.com/docs#repeat) or [package source](https://github.com/lodash/lodash/blob/4.0.0-npm-packages/lodash.repeat) for more details.
+See the [documentation](https://lodash.com/docs#repeat) or [package source](https://github.com/lodash/lodash/blob/4.0.2-npm-packages/lodash.repeat) for more details.
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/index.js
new file mode 100644
index 00000000000000..5c290973169fec
--- /dev/null
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/index.js
@@ -0,0 +1,463 @@
+/**
+ * lodash 4.0.2 (Custom Build)
+ * Build: `lodash modularize exports="npm" -o ./`
+ * Copyright jQuery Foundation and other contributors
+ * Released under MIT license
+ * Based on Underscore.js 1.8.3
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+var toString = require('lodash.tostring');
+
+/** Used as references for various `Number` constants. */
+var INFINITY = 1 / 0,
+ MAX_SAFE_INTEGER = 9007199254740991,
+ MAX_INTEGER = 1.7976931348623157e+308,
+ NAN = 0 / 0;
+
+/** `Object#toString` result references. */
+var funcTag = '[object Function]',
+ genTag = '[object GeneratorFunction]',
+ symbolTag = '[object Symbol]';
+
+/** Used to match leading and trailing whitespace. */
+var reTrim = /^\s+|\s+$/g;
+
+/** Used to detect bad signed hexadecimal string values. */
+var reIsBadHex = /^[-+]0x[0-9a-f]+$/i;
+
+/** Used to detect binary string values. */
+var reIsBinary = /^0b[01]+$/i;
+
+/** Used to detect octal string values. */
+var reIsOctal = /^0o[0-7]+$/i;
+
+/** Used to detect unsigned integer values. */
+var reIsUint = /^(?:0|[1-9]\d*)$/;
+
+/** Built-in method references without a dependency on `root`. */
+var freeParseInt = parseInt;
+
+/**
+ * Checks if `value` is a valid array-like index.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
+ * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
+ */
+function isIndex(value, length) {
+ value = (typeof value == 'number' || reIsUint.test(value)) ? +value : -1;
+ length = length == null ? MAX_SAFE_INTEGER : length;
+ return value > -1 && value % 1 == 0 && value < length;
+}
+
+/** Used for built-in method references. */
+var objectProto = Object.prototype;
+
+/**
+ * Used to resolve the
+ * [`toStringTag`](http://ecma-international.org/ecma-262/6.0/#sec-object.prototype.tostring)
+ * of values.
+ */
+var objectToString = objectProto.toString;
+
+/* Built-in method references for those with the same name as other `lodash` methods. */
+var nativeFloor = Math.floor;
+
+/**
+ * The base implementation of `_.property` without support for deep paths.
+ *
+ * @private
+ * @param {string} key The key of the property to get.
+ * @returns {Function} Returns the new function.
+ */
+function baseProperty(key) {
+ return function(object) {
+ return object == null ? undefined : object[key];
+ };
+}
+
+/**
+ * The base implementation of `_.repeat` which doesn't coerce arguments.
+ *
+ * @private
+ * @param {string} string The string to repeat.
+ * @param {number} n The number of times to repeat the string.
+ * @returns {string} Returns the repeated string.
+ */
+function baseRepeat(string, n) {
+ var result = '';
+ if (!string || n < 1 || n > MAX_SAFE_INTEGER) {
+ return result;
+ }
+ // Leverage the exponentiation by squaring algorithm for a faster repeat.
+ // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details.
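+  // For example, baseRepeat('ab', 5) runs three iterations instead of five
+  // concatenations: n=5 (odd) -> result='ab', n=2, string='abab';
+  // n=2 (even) -> n=1, string='abababab'; n=1 (odd) -> result='ababababab', n=0.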
+ do {
+ if (n % 2) {
+ result += string;
+ }
+ n = nativeFloor(n / 2);
+ if (n) {
+ string += string;
+ }
+ } while (n);
+
+ return result;
+}
+
+/**
+ * Gets the "length" property value of `object`.
+ *
+ * **Note:** This function is used to avoid a
+ * [JIT bug](https://bugs.webkit.org/show_bug.cgi?id=142792) that affects
+ * Safari on at least iOS 8.1-8.3 ARM64.
+ *
+ * @private
+ * @param {Object} object The object to query.
+ * @returns {*} Returns the "length" value.
+ */
+var getLength = baseProperty('length');
+
+/**
+ * Checks if the given arguments are from an iteratee call.
+ *
+ * @private
+ * @param {*} value The potential iteratee value argument.
+ * @param {*} index The potential iteratee index or key argument.
+ * @param {*} object The potential iteratee object argument.
+ * @returns {boolean} Returns `true` if the arguments are from an iteratee call,
+ * else `false`.
+ */
+function isIterateeCall(value, index, object) {
+ if (!isObject(object)) {
+ return false;
+ }
+ var type = typeof index;
+ if (type == 'number'
+ ? (isArrayLike(object) && isIndex(index, object.length))
+ : (type == 'string' && index in object)
+ ) {
+ return eq(object[index], value);
+ }
+ return false;
+}
+
+/**
+ * Performs a
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/6.0/#sec-samevaluezero)
+ * comparison between two values to determine if they are equivalent.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'user': 'fred' };
+ * var other = { 'user': 'fred' };
+ *
+ * _.eq(object, object);
+ * // => true
+ *
+ * _.eq(object, other);
+ * // => false
+ *
+ * _.eq('a', 'a');
+ * // => true
+ *
+ * _.eq('a', Object('a'));
+ * // => false
+ *
+ * _.eq(NaN, NaN);
+ * // => true
+ */
+function eq(value, other) {
+ return value === other || (value !== value && other !== other);
+}
+
+/**
+ * Checks if `value` is array-like. A value is considered array-like if it's
+ * not a function and has a `value.length` that's an integer greater than or
+ * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is array-like, else `false`.
+ * @example
+ *
+ * _.isArrayLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isArrayLike(document.body.children);
+ * // => true
+ *
+ * _.isArrayLike('abc');
+ * // => true
+ *
+ * _.isArrayLike(_.noop);
+ * // => false
+ */
+function isArrayLike(value) {
+ return value != null && isLength(getLength(value)) && !isFunction(value);
+}
+
+/**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is correctly classified,
+ * else `false`.
+ * @example
+ *
+ * _.isFunction(_);
+ * // => true
+ *
+ * _.isFunction(/abc/);
+ * // => false
+ */
+function isFunction(value) {
+ // The use of `Object#toString` avoids issues with the `typeof` operator
+ // in Safari 8 which returns 'object' for typed array and weak map constructors,
+ // and PhantomJS 1.9 which returns 'function' for `NodeList` instances.
+ var tag = isObject(value) ? objectToString.call(value) : '';
+ return tag == funcTag || tag == genTag;
+}
+
+/**
+ * Checks if `value` is a valid array-like length.
+ *
+ * **Note:** This function is loosely based on
+ * [`ToLength`](http://ecma-international.org/ecma-262/6.0/#sec-tolength).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a valid length,
+ * else `false`.
+ * @example
+ *
+ * _.isLength(3);
+ * // => true
+ *
+ * _.isLength(Number.MIN_VALUE);
+ * // => false
+ *
+ * _.isLength(Infinity);
+ * // => false
+ *
+ * _.isLength('3');
+ * // => false
+ */
+function isLength(value) {
+ return typeof value == 'number' &&
+ value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
+}
+
+/**
+ * Checks if `value` is the
+ * [language type](http://www.ecma-international.org/ecma-262/6.0/#sec-ecmascript-language-types)
+ * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an object, else `false`.
+ * @example
+ *
+ * _.isObject({});
+ * // => true
+ *
+ * _.isObject([1, 2, 3]);
+ * // => true
+ *
+ * _.isObject(_.noop);
+ * // => true
+ *
+ * _.isObject(null);
+ * // => false
+ */
+function isObject(value) {
+ var type = typeof value;
+ return !!value && (type == 'object' || type == 'function');
+}
+
+/**
+ * Checks if `value` is object-like. A value is object-like if it's not `null`
+ * and has a `typeof` result of "object".
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is object-like, else `false`.
+ * @example
+ *
+ * _.isObjectLike({});
+ * // => true
+ *
+ * _.isObjectLike([1, 2, 3]);
+ * // => true
+ *
+ * _.isObjectLike(_.noop);
+ * // => false
+ *
+ * _.isObjectLike(null);
+ * // => false
+ */
+function isObjectLike(value) {
+ return !!value && typeof value == 'object';
+}
+
+/**
+ * Checks if `value` is classified as a `Symbol` primitive or object.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is correctly classified,
+ * else `false`.
+ * @example
+ *
+ * _.isSymbol(Symbol.iterator);
+ * // => true
+ *
+ * _.isSymbol('abc');
+ * // => false
+ */
+function isSymbol(value) {
+ return typeof value == 'symbol' ||
+ (isObjectLike(value) && objectToString.call(value) == symbolTag);
+}
+
+/**
+ * Converts `value` to an integer.
+ *
+ * **Note:** This function is loosely based on
+ * [`ToInteger`](http://www.ecma-international.org/ecma-262/6.0/#sec-tointeger).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to convert.
+ * @returns {number} Returns the converted integer.
+ * @example
+ *
+ * _.toInteger(3);
+ * // => 3
+ *
+ * _.toInteger(Number.MIN_VALUE);
+ * // => 0
+ *
+ * _.toInteger(Infinity);
+ * // => 1.7976931348623157e+308
+ *
+ * _.toInteger('3');
+ * // => 3
+ */
+function toInteger(value) {
+ if (!value) {
+ return value === 0 ? value : 0;
+ }
+ value = toNumber(value);
+ if (value === INFINITY || value === -INFINITY) {
+ var sign = (value < 0 ? -1 : 1);
+ return sign * MAX_INTEGER;
+ }
+ var remainder = value % 1;
+ return value === value ? (remainder ? value - remainder : value) : 0;
+}
+
+/**
+ * Converts `value` to a number.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to process.
+ * @returns {number} Returns the number.
+ * @example
+ *
+ * _.toNumber(3);
+ * // => 3
+ *
+ * _.toNumber(Number.MIN_VALUE);
+ * // => 5e-324
+ *
+ * _.toNumber(Infinity);
+ * // => Infinity
+ *
+ * _.toNumber('3');
+ * // => 3
+ */
+function toNumber(value) {
+ if (typeof value == 'number') {
+ return value;
+ }
+ if (isSymbol(value)) {
+ return NAN;
+ }
+ if (isObject(value)) {
+ var other = isFunction(value.valueOf) ? value.valueOf() : value;
+ value = isObject(other) ? (other + '') : other;
+ }
+ if (typeof value != 'string') {
+ return value === 0 ? value : +value;
+ }
+ value = value.replace(reTrim, '');
+ var isBinary = reIsBinary.test(value);
+ return (isBinary || reIsOctal.test(value))
+ ? freeParseInt(value.slice(2), isBinary ? 2 : 8)
+ : (reIsBadHex.test(value) ? NAN : +value);
+}
+
+/**
+ * Repeats the given string `n` times.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category String
+ * @param {string} [string=''] The string to repeat.
+ * @param {number} [n=1] The number of times to repeat the string.
+ * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`.
+ * @returns {string} Returns the repeated string.
+ * @example
+ *
+ * _.repeat('*', 3);
+ * // => '***'
+ *
+ * _.repeat('abc', 2);
+ * // => 'abcabc'
+ *
+ * _.repeat('abc', 0);
+ * // => ''
+ */
+function repeat(string, n, guard) {
+ if ((guard ? isIterateeCall(string, n, guard) : n === undefined)) {
+ n = 1;
+ } else {
+ n = toInteger(n);
+ }
+ return baseRepeat(toString(string), n);
+}
+
+module.exports = repeat;
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/package.json
similarity index 74%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/package.json
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/package.json
index a3292eb7f1bf87..2d808082598013 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padstart/node_modules/lodash.repeat/package.json
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.repeat/package.json
@@ -1,6 +1,6 @@
{
"name": "lodash.repeat",
- "version": "4.0.0",
+ "version": "4.0.2",
"description": "The lodash method `_.repeat` exported as a module.",
"homepage": "https://lodash.com/",
"icon": "https://lodash.com/icon.svg",
@@ -22,7 +22,7 @@
},
{
"name": "Blaine Bublitz",
- "email": "blaine@iceddev.com",
+ "email": "blaine.bublitz@gmail.com",
"url": "https://github.com/phated"
},
{
@@ -44,18 +44,18 @@
"bugs": {
"url": "https://github.com/lodash/lodash/issues"
},
- "_id": "lodash.repeat@4.0.0",
- "_shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "_from": "lodash.repeat@>=4.0.0 <5.0.0",
- "_npmVersion": "2.14.18",
+ "_id": "lodash.repeat@4.0.2",
+ "_shasum": "72c4e409757448c99e3c4c334ab066b789ca3f3b",
+ "_from": "lodash.repeat@4.0.2",
+ "_npmVersion": "2.15.3",
"_nodeVersion": "5.5.0",
"_npmUser": {
"name": "jdalton",
"email": "john.david.dalton@gmail.com"
},
"dist": {
- "shasum": "aaf570b2ab0bfb0dda6d6e93291d54b30b1f7d22",
- "tarball": "http://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
+ "shasum": "72c4e409757448c99e3c4c334ab066b789ca3f3b",
+ "tarball": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.2.tgz"
},
"maintainers": [
{
@@ -72,9 +72,10 @@
}
],
"_npmOperationalInternal": {
- "host": "packages-9-west.internal.npmjs.com",
- "tmp": "tmp/lodash.repeat-4.0.0.tgz_1455602646227_0.7481637196615338"
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/lodash.repeat-4.0.2.tgz_1460126921614_0.3440221941564232"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.0.tgz"
+ "_resolved": "https://registry.npmjs.org/lodash.repeat/-/lodash.repeat-4.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/LICENSE b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/LICENSE
similarity index 100%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.repeat/LICENSE
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/LICENSE
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/README.md b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/README.md
similarity index 100%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/README.md
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/README.md
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/index.js b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/index.js
similarity index 100%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.pad/node_modules/lodash.tostring/index.js
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/index.js
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/package.json b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/package.json
similarity index 90%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/package.json
rename to deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/package.json
index a6a8dbcfce201f..daf01553a964eb 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.tostring/package.json
+++ b/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.tostring/package.json
@@ -43,7 +43,7 @@
},
"_id": "lodash.tostring@4.1.2",
"_shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "_from": "lodash.tostring@>=4.0.0 <5.0.0",
+ "_from": "lodash.tostring@4.1.2",
"_npmVersion": "2.14.17",
"_nodeVersion": "5.5.0",
"_npmUser": {
@@ -52,7 +52,7 @@
},
"dist": {
"shasum": "7d326a5cf64da4298f2fd35b688d848267535288",
- "tarball": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
+ "tarball": "http://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
},
"maintainers": [
{
@@ -73,5 +73,6 @@
"tmp": "tmp/lodash.tostring-4.1.2.tgz_1456896853027_0.8195764778647572"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz"
+ "_resolved": "https://registry.npmjs.org/lodash.tostring/-/lodash.tostring-4.1.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/glob/package.json b/deps/npm/node_modules/read-package-json/node_modules/glob/package.json
index 19e926cbf1035a..d6cebb41e61088 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/glob/package.json
+++ b/deps/npm/node_modules/read-package-json/node_modules/glob/package.json
@@ -59,7 +59,7 @@
},
"dist": {
"shasum": "0f08860f6a155127b2fadd4f9ce24b1aab6e4d22",
- "tarball": "http://registry.npmjs.org/glob/-/glob-6.0.4.tgz"
+ "tarball": "https://registry.npmjs.org/glob/-/glob-6.0.4.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
new file mode 100644
index 00000000000000..5c93f456546877
--- /dev/null
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/LICENSE
@@ -0,0 +1,13 @@
+ DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+ Version 2, December 2004
+
+ Copyright (C) 2004 Sam Hocevar
+
+ Everyone is permitted to copy and distribute verbatim or modified
+ copies of this license document, and changing it is allowed as long
+ as the name is changed.
+
+ DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. You just DO WHAT THE FUCK YOU WANT TO.
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
index 5f9fe998610d0f..0c9fbe68809cd0 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/parse.js
@@ -75,9 +75,22 @@ function formatError(input, msg, position, lineno, column, json5) {
function parse(input, options) {
// parse as a standard JSON mode
- var json5 = !(options.mode === 'json' || options.legacy)
+ var json5 = false;
+ var cjson = false;
+
+ if (options.legacy || options.mode === 'json') {
+ // use json
+ } else if (options.mode === 'cjson') {
+ cjson = true;
+ } else if (options.mode === 'json5') {
+ json5 = true;
+ } else {
+ // use it by default
+ json5 = true;
+ }
+
var isLineTerminator = json5 ? Uni.isLineTerminator : Uni.isLineTerminatorJSON
- var isWhiteSpace = json5 ? Uni.isWhiteSpace : Uni.isWhiteSpaceJSON
+ var isWhiteSpace = json5 ? Uni.isWhiteSpace : Uni.isWhiteSpaceJSON
var length = input.length
, lineno = 0
@@ -257,7 +270,7 @@ function parse(input, options) {
// nothing
} else if (chr === '/'
- && json5
+ && (json5 || cjson)
&& (input[position] === '/' || input[position] === '*')
) {
position--
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
index ce89d77ee1f433..232229ecc8af20 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/lib/stringify.js
@@ -38,7 +38,7 @@ var hasOwnProperty = Object.prototype.hasOwnProperty
var escapable = /[\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/
function _stringify(object, options, recursiveLvl, currentKey) {
- var opt_json = options.mode === 'json'
+ var json5 = (options.mode === 'json5' || !options.mode)
/*
* Opinionated decision warning:
*
@@ -114,18 +114,18 @@ function _stringify(object, options, recursiveLvl, currentKey) {
var chr = key.charCodeAt(i)
if (chr < 0x10) {
- if (chr === 0 && !opt_json) {
+ if (chr === 0 && json5) {
result += '\\0'
- } else if (chr >= 8 && chr <= 13 && (!opt_json || chr !== 11)) {
+ } else if (chr >= 8 && chr <= 13 && (json5 || chr !== 11)) {
result += special_chars[chr]
- } else if (!opt_json) {
+ } else if (json5) {
result += '\\x0' + chr.toString(16)
} else {
result += '\\u000' + chr.toString(16)
}
} else if (chr < 0x20) {
- if (!opt_json) {
+ if (json5) {
result += '\\x' + chr.toString(16)
} else {
result += '\\u00' + chr.toString(16)
@@ -149,7 +149,7 @@ function _stringify(object, options, recursiveLvl, currentKey) {
} else if (options.ascii || Uni.isLineTerminator(key[i]) || escapable.exec(key[i])) {
if (chr < 0x100) {
- if (!opt_json) {
+ if (json5) {
result += '\\x' + chr.toString(16)
} else {
result += '\\u00' + chr.toString(16)
@@ -256,7 +256,7 @@ function _stringify(object, options, recursiveLvl, currentKey) {
// information needlessly?
return '-0'
}
- if (options.mode === 'json' && !Number.isFinite(object)) {
+ if (!json5 && !Number.isFinite(object)) {
// json don't support infinity (= sucks)
return 'null'
}
@@ -343,9 +343,9 @@ module.exports.stringify = function stringifyJSON(object, options, _space) {
if (options.indent == null) options.indent = '\t'
if (options.quote == null) options.quote = "'"
if (options.ascii == null) options.ascii = false
- if (options.mode == null) options.mode = 'simple'
+ if (options.mode == null) options.mode = 'json5'
- if (options.mode === 'json') {
+ if (options.mode === 'json' || options.mode === 'cjson') {
// json only supports double quotes (= sucks)
options.quote = '"'
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
index 6e66208d9e1504..1edba5effc5435 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.json
@@ -1,6 +1,6 @@
{
"name": "jju",
- "version": "1.2.1",
+ "version": "1.3.0",
"description": "a set of utilities to work with JSON / JSON5 documents",
"author": {
"name": "Alex Kocharin",
@@ -8,7 +8,7 @@
},
"repository": {
"type": "git",
- "url": "git://github.com/rlidwka/jju"
+ "url": "git://github.com/rlidwka/jju.git"
},
"bugs": {
"url": "https://github.com/rlidwka/jju/issues"
@@ -37,9 +37,9 @@
"type": "WTFPL",
"url": "http://www.wtfpl.net/txt/copying/"
},
- "gitHead": "8b079c1d03af527ab28a47c7b714d6f888abc53d",
- "_id": "jju@1.2.1",
- "_shasum": "edf6ec20d5d668c80c2c00cea63f8a9422a4b528",
+ "gitHead": "6a1248fc29abb3f418fa143e31ee548cd5a2477c",
+ "_id": "jju@1.3.0",
+ "_shasum": "dadd9ef01924bc728b03f2f7979bdbd62f7a2aaa",
"_from": "jju@>=1.1.0 <2.0.0",
"_npmVersion": "2.0.1",
"_nodeVersion": "2.2.1",
@@ -54,9 +54,14 @@
}
],
"dist": {
- "shasum": "edf6ec20d5d668c80c2c00cea63f8a9422a4b528",
- "tarball": "http://registry.npmjs.org/jju/-/jju-1.2.1.tgz"
+ "shasum": "dadd9ef01924bc728b03f2f7979bdbd62f7a2aaa",
+ "tarball": "https://registry.npmjs.org/jju/-/jju-1.3.0.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-6-west.internal.npmjs.com",
+ "tmp": "tmp/jju-1.3.0.tgz_1455989902144_0.8787874563131481"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/jju/-/jju-1.2.1.tgz"
+ "_resolved": "https://registry.npmjs.org/jju/-/jju-1.3.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
index 19283ecc9dc3ad..fdbb5372d4bcde 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/node_modules/jju/package.yaml
@@ -3,7 +3,7 @@
# "jju" stands for "json/json5 utils"
name: jju
-version: 1.2.1
+version: 1.3.0
description: a set of utilities to work with JSON / JSON5 documents
author:
diff --git a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json
index 99d79f71a3e8a9..3bfae8dab06b94 100644
--- a/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json
+++ b/deps/npm/node_modules/read-package-json/node_modules/json-parse-helpfulerror/package.json
@@ -9,7 +9,7 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/smikes/json-parse-helpfulerror.git"
+ "url": "git+https://github.com/smikes/json-parse-helpfulerror.git"
},
"keywords": [
"json",
@@ -53,8 +53,9 @@
],
"dist": {
"shasum": "13f14ce02eed4e981297b64eb9e3b932e2dd13dc",
- "tarball": "http://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz"
+ "tarball": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz"
+ "_resolved": "https://registry.npmjs.org/json-parse-helpfulerror/-/json-parse-helpfulerror-1.0.3.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/read-package-json/package.json b/deps/npm/node_modules/read-package-json/package.json
index a607dd06fed061..a64f7b2b2529de 100644
--- a/deps/npm/node_modules/read-package-json/package.json
+++ b/deps/npm/node_modules/read-package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "read-package-json",
- "version": "2.0.3",
+ "version": "2.0.4",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
@@ -29,23 +29,23 @@
"graceful-fs": "^4.1.2"
},
"license": "ISC",
- "gitHead": "336a212716bb830781d7e71580adaeda377b69d9",
+ "gitHead": "de5172a8ada18a2a906294216e22206bfa13321d",
"bugs": {
"url": "https://github.com/npm/read-package-json/issues"
},
"homepage": "https://github.com/npm/read-package-json#readme",
- "_id": "read-package-json@2.0.3",
- "_shasum": "f8cec1627053b54f384b353224545e607554c5d2",
- "_from": "read-package-json@2.0.3",
- "_npmVersion": "3.5.4",
- "_nodeVersion": "4.2.2",
+ "_id": "read-package-json@2.0.4",
+ "_shasum": "61ed1b2256ea438d8008895090be84b8e799c853",
+ "_from": "read-package-json@2.0.4",
+ "_npmVersion": "3.8.7",
+ "_nodeVersion": "5.6.0",
"_npmUser": {
- "name": "iarna",
- "email": "me@re-becca.org"
+ "name": "zkat",
+ "email": "kat@sykosomatic.org"
},
"dist": {
- "shasum": "f8cec1627053b54f384b353224545e607554c5d2",
- "tarball": "http://registry.npmjs.org/read-package-json/-/read-package-json-2.0.3.tgz"
+ "shasum": "61ed1b2256ea438d8008895090be84b8e799c853",
+ "tarball": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.4.tgz"
},
"maintainers": [
{
@@ -65,6 +65,11 @@
"email": "kat@sykosomatic.org"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/read-package-json-2.0.4.tgz_1461711438155_0.03128739935345948"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.3.tgz"
+ "_resolved": "https://registry.npmjs.org/read-package-json/-/read-package-json-2.0.4.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/read-package-json/read-json.js b/deps/npm/node_modules/read-package-json/read-json.js
index 3f93603f89b289..b1888956c8aae0 100644
--- a/deps/npm/node_modules/read-package-json/read-json.js
+++ b/deps/npm/node_modules/read-package-json/read-json.js
@@ -58,7 +58,18 @@ function stripBOM (content) {
}
function parseJson (file, er, d, log, strict, cb) {
- if (er && er.code === 'ENOENT') return indexjs(file, er, log, strict, cb)
+ if (er && er.code === 'ENOENT') {
+ return fs.stat(path.dirname(file), function (err, stat) {
+ if (!err && stat && !stat.isDirectory()) {
+ // ENOTDIR isn't used on Windows, but npm expects it.
+ er = Object.create(er)
+ er.code = 'ENOTDIR'
+ return cb(er)
+ } else {
+ return indexjs(file, er, log, strict, cb)
+ }
+ })
+ }
if (er) return cb(er)
try {
diff --git a/deps/npm/node_modules/read-package-json/test/helpful.js b/deps/npm/node_modules/read-package-json/test/helpful.js
index f5b1a8b1304834..84f531360c2b02 100644
--- a/deps/npm/node_modules/read-package-json/test/helpful.js
+++ b/deps/npm/node_modules/read-package-json/test/helpful.js
@@ -10,3 +10,11 @@ tap.test('erroneous package data', function (t) {
t.end()
})
})
+
+tap.test('ENOTDIR for non-directory packages', function (t) {
+ readJson(path.resolve(__filename, 'package.json'), function (er, data) {
+ t.ok(er)
+ t.equal(er.code, 'ENOTDIR')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/read-package-json/zunda b/deps/npm/node_modules/read-package-json/zunda
deleted file mode 100644
index 8a80494483ec17..00000000000000
--- a/deps/npm/node_modules/read-package-json/zunda
+++ /dev/null
@@ -1 +0,0 @@
-{"_id":"zunda","_rev":"2-d344ae8ca038029c6651c8fd579d3b39","name":"zunda","description":"Zunda for Node.js","dist-tags":{"latest":"0.0.1"},"versions":{"0.0.1":{"name":"zunda","version":"0.0.1","author":{"name":"inken"},"description":"Zunda for Node.js","main":"zunda.js","dependencies":{"execsync":"*"},"repository":{"type":"git","url":"https://github.com/inkenkun/node-zunda.git"},"keywords":["zunda"],"homepage":"http://x1.inkenkun.com/","license":"MIT","bugs":{"url":"https://github.com/inkenkun/node-zunda/issues"},"_id":"zunda@0.0.1","dist":{"shasum":"c54fe03aa53efbcf52fcd5c7da4180d3f37871c9","tarball":"http://registry.npmjs.org/zunda/-/zunda-0.0.1.tgz"},"_from":".","_npmVersion":"1.4.3","_npmUser":{"name":"inken","email":"inkenkun@gmail.com"},"maintainers":[{"name":"inken","email":"inkenkun@gmail.com"}],"directories":{}}},"readme":"# ZundaJS\r\n\r\nZundaの結果をパースする Node.js 用モジュールです。\r\n\r\n## Installation\r\n\r\n``` shell\r\n$ npm install zunda\r\n```\r\n\r\n\r\n## Usage\r\n\r\n### Asynchronous\r\n\r\n```javascript\r\n\tvar Zunda = new require('zunda')\r\n\t , zunda = new Zunda()\r\n\t;\r\n\tzunda.parse('次郎は大阪に行ったが、太郎は東京には行かず地元に残ろうとした', function(err, result) {\r\n\t\tif (err) throw err;\r\n\t\tconsole.log(result);\r\n\t});\r\n```\r\n\r\n### Synchronous\r\n\r\n```javascript\r\n\tvar Zunda = new require('zunda')\r\n\t , zunda = new Zunda()\r\n\t;\r\n\tvar result = zunda.parseSync('次郎は大阪に行ったが、太郎は東京には行かず地元に残ろうとした');\r\n\tconsole.log(result);\r\n```\r\n\r\n## Result\r\n\r\n\r\n\t [ \r\n\t { event: [ '#EVENT0', '4', 'wr:筆者', '非未来', '0', '叙述', '成立', '0', '0' ],\r\n words: '次郎は大阪に行ったが、',\r\n \t wakachi: '次郎 は 大阪 に 行っ た が 、' },\r\n { event: [ '#EVENT1', '13', 'wr:筆者', '未来', '0', '叙述', '不成立', '0', '0' ],\r\n words: '太郎は東京には行かず',\r\n wakachi: '太郎 は 東京 に は 行か ず' },\r\n { event: [ '#EVENT2', '17', 'wr:筆者', '未来', '0', '意志', '高確率', 'ポジティブ', '0' ],\r\n words: '地元に残ろうとした',\r\n wakachi: '地元 に 残ろ う と し た' },\r\n { event: [ '#EVENT3', '20', 'wr:筆者', '非未来', '0', '叙述', '成立', '0', '0' ],\r\n words: '残ろうとした',\r\n wakachi: '残ろ う と し た' } \r\n ]\r\n\r\n\r\n\r\n## Reference\r\n\r\nhicomiさんの mecab-async https://www.npmjs.org/package/mecab-async を参考にさせていただきました。\r\nzunda https://code.google.com/p/zunda/\r\n","maintainers":[{"name":"inken","email":"inkenkun@gmail.com"}],"time":{"modified":"2014-06-23T01:53:45.429Z","created":"2014-06-23T01:53:45.429Z","0.0.1":"2014-06-23T01:53:45.429Z"},"homepage":"http://x1.inkenkun.com/","keywords":["zunda"],"repository":{"type":"git","url":"https://github.com/inkenkun/node-zunda.git"},"author":{"name":"inken"},"bugs":{"url":"https://github.com/inkenkun/node-zunda/issues"},"license":"MIT","readmeFilename":"README.md","_attachments":{}}
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/readable-stream/.travis.yml
new file mode 100644
index 00000000000000..ae0156a9bbb156
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/.travis.yml
@@ -0,0 +1,54 @@
+sudo: false
+language: node_js
+before_install:
+ - npm install -g npm@2
+ - npm install -g npm
+notifications:
+ email: false
+matrix:
+ fast_finish: true
+ allow_failures:
+ - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
+ include:
+ - node_js: '0.8'
+ env: TASK=test
+ - node_js: '0.10'
+ env: TASK=test
+ - node_js: '0.11'
+ env: TASK=test
+ - node_js: '0.12'
+ env: TASK=test
+ - node_js: 1
+ env: TASK=test
+ - node_js: 2
+ env: TASK=test
+ - node_js: 3
+ env: TASK=test
+ - node_js: 4
+ env: TASK=test
+ - node_js: 5
+ env: TASK=test
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest
+script: "npm run $TASK"
+env:
+ global:
+ - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
+ - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=
diff --git a/deps/npm/node_modules/readable-stream/README.md b/deps/npm/node_modules/readable-stream/README.md
index e46b823903d2c6..c4e4a34a7ae0fa 100644
--- a/deps/npm/node_modules/readable-stream/README.md
+++ b/deps/npm/node_modules/readable-stream/README.md
@@ -1,15 +1,36 @@
# readable-stream
-***Node-core streams for userland***
+***Node-core v5.9.1 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
+
[](https://nodei.co/npm/readable-stream/)
-[](https://nodei.co/npm/readable-stream/)
+[](https://nodei.co/npm/readable-stream/)
+
+
+[](https://saucelabs.com/u/readable-stream)
+
+```bash
+npm install --save readable-stream
+```
+
+***Node-core streams for userland***
-This package is a mirror of the Streams2 and Streams3 implementations in Node-core.
+This package is a mirror of the Streams2 and Streams3 implementations in
+Node-core, including [documentation](doc/stream.markdown).
-If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background, see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
-**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.
+As of version 2.0.0 **readable-stream** uses semantic versioning.
-**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"`
+# Streams WG Team Members
+* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com>
+ - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
+ - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org>
+ - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
+* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com>
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me>
diff --git a/deps/npm/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/readable-stream/doc/stream.markdown
new file mode 100644
index 00000000000000..ecf997a3f5a14f
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/doc/stream.markdown
@@ -0,0 +1,1761 @@
+# Stream
+
+ Stability: 2 - Stable
+
+A stream is an abstract interface implemented by various objects in
+Node.js. For example, a [request to an HTTP server][http-incoming-message] is a
+stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All
+streams are instances of [`EventEmitter`][].
+
+You can load the Stream base classes by doing `require('stream')`.
+There are base classes provided for [Readable][] streams, [Writable][]
+streams, [Duplex][] streams, and [Transform][] streams.
+
+This document is split up into 3 sections:
+
+1. The first section explains the parts of the API that you need to be
+ aware of to use streams in your programs.
+2. The second section explains the parts of the API that you need to
+   use if you implement your own custom streams. The API is designed to
+ make this easy for you to do.
+3. The third section goes into more depth about how streams work,
+ including some of the internal mechanisms and functions that you
+ should probably not modify unless you definitely know what you are
+ doing.
+
+
+## API for Stream Consumers
+
+
+
+Streams can be either [Readable][], [Writable][], or both ([Duplex][]).
+
+All streams are EventEmitters, but they also have other custom methods
+and properties depending on whether they are Readable, Writable, or
+Duplex.
+
+If a stream is both Readable and Writable, then it implements all of
+the methods and events. So, a [Duplex][] or [Transform][] stream is
+fully described by this API, though its implementation may be
+somewhat different.
+
+It is not necessary to implement Stream interfaces in order to consume
+streams in your programs. If you **are** implementing streaming
+interfaces in your own program, please also refer to
+[API for Stream Implementors][].
+
+Almost all Node.js programs, no matter how simple, use Streams in some
+way. Here is an example of using Streams in a Node.js program:
+
+```js
+const http = require('http');
+
+var server = http.createServer( (req, res) => {
+ // req is an http.IncomingMessage, which is a Readable Stream
+ // res is an http.ServerResponse, which is a Writable Stream
+
+ var body = '';
+ // we want to get the data as utf8 strings
+ // If you don't set an encoding, then you'll get Buffer objects
+ req.setEncoding('utf8');
+
+ // Readable streams emit 'data' events once a listener is added
+ req.on('data', (chunk) => {
+ body += chunk;
+ });
+
+  // the end event tells you that you have the entire body
+ req.on('end', () => {
+ try {
+ var data = JSON.parse(body);
+ } catch (er) {
+ // uh oh! bad json!
+ res.statusCode = 400;
+ return res.end(`error: ${er.message}`);
+ }
+
+ // write back something interesting to the user:
+ res.write(typeof data);
+ res.end();
+ });
+});
+
+server.listen(1337);
+
+// $ curl localhost:1337 -d '{}'
+// object
+// $ curl localhost:1337 -d '"foo"'
+// string
+// $ curl localhost:1337 -d 'not json'
+// error: Unexpected token o
+```
+
+### Class: stream.Duplex
+
+Duplex streams are streams that implement both the [Readable][] and
+[Writable][] interfaces.
+
+Examples of Duplex streams include:
+
+* [TCP sockets][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+
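+For instance (an illustrative sketch, not taken from the Node-core docs), the
+gzip stream returned by `zlib.createGzip()` is a Duplex: bytes written to its
+writable side come out gzip-compressed on its readable side:
+
+```js
+const zlib = require('zlib');
+
+// zlib.createGzip() returns a Duplex (specifically, a Transform) stream.
+var gzip = zlib.createGzip();
+
+// Everything typed on stdin is written to gzip's writable side and the
+// compressed output is read back out and piped to stdout.
+process.stdin.pipe(gzip).pipe(process.stdout);
+```
+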
+### Class: stream.Readable
+
+
+
+The Readable stream interface is the abstraction for a *source* of
+data that you are reading from. In other words, data comes *out* of a
+Readable stream.
+
+A Readable stream will not start emitting data until you indicate that
+you are ready to receive it.
+
+Readable streams have two "modes": a **flowing mode** and a **paused
+mode**. When in flowing mode, data is read from the underlying system
+and provided to your program as fast as possible. In paused mode, you
+must explicitly call [`stream.read()`][stream-read] to get chunks of data out.
+Streams start out in paused mode.
+
+**Note**: If no data event handlers are attached, and there are no
+[`stream.pipe()`][] destinations, and the stream is switched into flowing
+mode, then data will be lost.
+
+You can switch to flowing mode by doing any of the following:
+
+* Adding a [`'data'`][] event handler to listen for data.
+* Calling the [`stream.resume()`][stream-resume] method to explicitly open the
+ flow.
+* Calling the [`stream.pipe()`][] method to send the data to a [Writable][].
+
+You can switch back to paused mode by doing either of the following:
+
+* If there are no pipe destinations, by calling the
+ [`stream.pause()`][stream-pause] method.
+* If there are pipe destinations, by removing any [`'data'`][] event
+ handlers, and removing all pipe destinations by calling the
+ [`stream.unpipe()`][] method.
+
+Note that, for backwards compatibility reasons, removing [`'data'`][]
+event handlers will **not** automatically pause the stream. Also, if
+there are piped destinations, then calling [`stream.pause()`][stream-pause] will
+not guarantee that the stream will *remain* paused once those
+destinations drain and ask for more data.
+
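+For instance (a sketch reusing the `getReadableStreamSomehow()` placeholder
+from the examples below), a stream can be consumed entirely in paused mode by
+calling [`stream.read()`][stream-read] from a `'readable'` handler:
+
+```js
+var readable = getReadableStreamSomehow();
+readable.on('readable', () => {
+  var chunk;
+  // pull chunks out explicitly until the internal buffer is drained
+  while ((chunk = readable.read()) !== null) {
+    console.log('got %d bytes of data', chunk.length);
+  }
+});
+```
+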
+Examples of readable streams include:
+
+* [HTTP responses, on the client][http-incoming-message]
+* [HTTP requests, on the server][http-incoming-message]
+* [fs read streams][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+* [TCP sockets][]
+* [child process stdout and stderr][]
+* [`process.stdin`][]
+
+#### Event: 'close'
+
+Emitted when the stream and any of its underlying resources (a file
+descriptor, for example) have been closed. The event indicates that
+no more events will be emitted, and no further computation will occur.
+
+Not all streams will emit the `'close'` event.
+
+#### Event: 'data'
+
+* `chunk` {Buffer|String} The chunk of data.
+
+Attaching a `'data'` event listener to a stream that has not been
+explicitly paused will switch the stream into flowing mode. Data will
+then be passed as soon as it is available.
+
+If you just want to get all the data out of the stream as fast as
+possible, this is the best way to do so.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.on('data', (chunk) => {
+ console.log('got %d bytes of data', chunk.length);
+});
+```
+
+#### Event: 'end'
+
+This event fires when there will be no more data to read.
+
+Note that the `'end'` event **will not fire** unless the data is
+completely consumed. This can be done by switching into flowing mode,
+or by calling [`stream.read()`][stream-read] repeatedly until you get to the
+end.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.on('data', (chunk) => {
+ console.log('got %d bytes of data', chunk.length);
+});
+readable.on('end', () => {
+ console.log('there will be no more data.');
+});
+```
+
+#### Event: 'error'
+
+* {Error Object}
+
+Emitted if there was an error receiving data.
+
+#### Event: 'readable'
+
+When a chunk of data can be read from the stream, it will emit a
+`'readable'` event.
+
+In some cases, listening for a `'readable'` event will cause some data
+to be read into the internal buffer from the underlying system, if it
+hadn't already.
+
+```javascript
+var readable = getReadableStreamSomehow();
+readable.on('readable', () => {
+ // there is some data to read now
+});
+```
+
+Once the internal buffer is drained, a `'readable'` event will fire
+again when more data is available.
+
+The `'readable'` event is not emitted in flowing mode, with the sole
+exception of the last one, emitted at end-of-stream.
+
+The `'readable'` event indicates that the stream has new information:
+either new data is available or the end of the stream has been reached.
+In the former case, [`stream.read()`][stream-read] will return that data. In the
+latter case, [`stream.read()`][stream-read] will return null. For instance, in
+the following example, `foo.txt` is an empty file:
+
+```js
+const fs = require('fs');
+var rr = fs.createReadStream('foo.txt');
+rr.on('readable', () => {
+ console.log('readable:', rr.read());
+});
+rr.on('end', () => {
+ console.log('end');
+});
+```
+
+The output of running this script is:
+
+```
+$ node test.js
+readable: null
+end
+```
+
+#### readable.isPaused()
+
+* Return: {Boolean}
+
+This method returns whether or not the `readable` has been **explicitly**
+paused by client code (using [`stream.pause()`][stream-pause] without a
+corresponding [`stream.resume()`][stream-resume]).
+
+```js
+var readable = new stream.Readable();
+
+readable.isPaused(); // === false
+readable.pause();
+readable.isPaused(); // === true
+readable.resume();
+readable.isPaused(); // === false
+```
+
+#### readable.pause()
+
+* Return: `this`
+
+This method will cause a stream in flowing mode to stop emitting
+[`'data'`][] events, switching out of flowing mode. Any data that becomes
+available will remain in the internal buffer.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.on('data', (chunk) => {
+ console.log('got %d bytes of data', chunk.length);
+ readable.pause();
+ console.log('there will be no more data for 1 second');
+ setTimeout(() => {
+ console.log('now data will start flowing again');
+ readable.resume();
+ }, 1000);
+});
+```
+
+#### readable.pipe(destination[, options])
+
+* `destination` {stream.Writable} The destination for writing data
+* `options` {Object} Pipe options
+ * `end` {Boolean} End the writer when the reader ends. Default = `true`
+
+This method pulls all the data out of a readable stream, and writes it
+to the supplied destination, automatically managing the flow so that
+the destination is not overwhelmed by a fast readable stream.
+
+Multiple destinations can be piped to safely.
+
+```js
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt'
+readable.pipe(writable);
+```
+
+This function returns the destination stream, so you can set up pipe
+chains like so:
+
+```js
+var r = fs.createReadStream('file.txt');
+var z = zlib.createGzip();
+var w = fs.createWriteStream('file.txt.gz');
+r.pipe(z).pipe(w);
+```
+
+For example, emulating the Unix `cat` command:
+
+```js
+process.stdin.pipe(process.stdout);
+```
+
+By default [`stream.end()`][stream-end] is called on the destination when the
+source stream emits [`'end'`][], so that `destination` is no longer writable.
+Pass `{ end: false }` as `options` to keep the destination stream open.
+
+This keeps `writer` open so that "Goodbye" can be written at the
+end.
+
+```js
+reader.pipe(writer, { end: false });
+reader.on('end', () => {
+ writer.end('Goodbye\n');
+});
+```
+
+Note that [`process.stderr`][] and [`process.stdout`][] are never closed until
+the process exits, regardless of the specified options.
+
+#### readable.read([size])
+
+* `size` {Number} Optional argument to specify how much data to read.
+* Return: {String|Buffer|Null}
+
+The `read()` method pulls some data out of the internal buffer and
+returns it. If there is no data available, then it will return
+`null`.
+
+If you pass in a `size` argument, then it will return that many
+bytes. If `size` bytes are not available, then it will return `null`,
+unless we've ended, in which case it will return the data remaining
+in the buffer.
+
+If you do not specify a `size` argument, then it will return all the
+data in the internal buffer.
+
+This method should only be called in paused mode. In flowing mode,
+this method is called automatically until the internal buffer is
+drained.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.on('readable', () => {
+ var chunk;
+ while (null !== (chunk = readable.read())) {
+ console.log('got %d bytes of data', chunk.length);
+ }
+});
+```
+
+If this method returns a data chunk, then it will also trigger the
+emission of a [`'data'`][] event.
+
+Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][]
+event has been triggered will return `null`. No runtime error will be raised.
+
+#### readable.resume()
+
+* Return: `this`
+
+This method will cause the readable stream to resume emitting [`'data'`][]
+events.
+
+This method will switch the stream into flowing mode. If you do *not*
+want to consume the data from a stream, but you *do* want to get to
+its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open
+the flow of data.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.resume();
+readable.on('end', () => {
+ console.log('got to the end, but did not read anything');
+});
+```
+
+#### readable.setEncoding(encoding)
+
+* `encoding` {String} The encoding to use.
+* Return: `this`
+
+Call this function to cause the stream to return strings of the specified
+encoding instead of Buffer objects. For example, if you do
+`readable.setEncoding('utf8')`, then the output data will be interpreted as
+UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`,
+then the data will be encoded in hexadecimal string format.
+
+This properly handles multi-byte characters that would otherwise be
+potentially mangled if you simply pulled the Buffers directly and
+called [`buf.toString(encoding)`][] on them. If you want to read the data
+as strings, always use this method.
+
+You can also disable any encoding entirely with `readable.setEncoding(null)`.
+This approach is useful when dealing with binary data or with large
+multi-byte strings spread out over multiple chunks.
+
+```js
+var readable = getReadableStreamSomehow();
+readable.setEncoding('utf8');
+readable.on('data', (chunk) => {
+ assert.equal(typeof chunk, 'string');
+ console.log('got %d characters of string data', chunk.length);
+});
+```
+
+#### readable.unpipe([destination])
+
+* `destination` {stream.Writable} Optional specific stream to unpipe
+
+This method will remove the hooks set up for a previous [`stream.pipe()`][]
+call.
+
+If the destination is not specified, then all pipes are removed.
+
+If the destination is specified, but no pipe is set up for it, then
+this is a no-op.
+
+```js
+var readable = getReadableStreamSomehow();
+var writable = fs.createWriteStream('file.txt');
+// All the data from readable goes into 'file.txt',
+// but only for the first second
+readable.pipe(writable);
+setTimeout(() => {
+ console.log('stop writing to file.txt');
+ readable.unpipe(writable);
+ console.log('manually close the file stream');
+ writable.end();
+}, 1000);
+```
+
+#### readable.unshift(chunk)
+
+* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue
+
+This is useful in certain cases where a stream is being consumed by a
+parser, which needs to "un-consume" some data that it has
+optimistically pulled out of the source, so that the stream can be
+passed on to some other party.
+
+Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event
+has been triggered; a runtime error will be raised.
+
+If you find that you must often call `stream.unshift(chunk)` in your
+programs, consider implementing a [Transform][] stream instead. (See [API
+for Stream Implementors][].)
+
+```js
+// Pull off a header delimited by \n\n
+// use unshift() if we get too much
+// Call the callback with (error, header, stream)
+const StringDecoder = require('string_decoder').StringDecoder;
+function parseHeader(stream, callback) {
+ stream.on('error', callback);
+ stream.on('readable', onReadable);
+ var decoder = new StringDecoder('utf8');
+ var header = '';
+ function onReadable() {
+ var chunk;
+ while (null !== (chunk = stream.read())) {
+ var str = decoder.write(chunk);
+ if (str.match(/\n\n/)) {
+ // found the header boundary
+ var split = str.split(/\n\n/);
+ header += split.shift();
+ var remaining = split.join('\n\n');
+ var buf = new Buffer(remaining, 'utf8');
+ if (buf.length)
+ stream.unshift(buf);
+ stream.removeListener('error', callback);
+ stream.removeListener('readable', onReadable);
+ // now the body of the message can be read from the stream.
+ callback(null, header, stream);
+ } else {
+ // still reading the header.
+ header += str;
+ }
+ }
+ }
+}
+```
+
+Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)`
+will not end the reading process by resetting the internal reading state of the
+stream. This can cause unexpected results if `unshift()` is called during a
+read (i.e. from within a [`stream._read()`][stream-_read] implementation on a
+custom stream). Following the call to `unshift()` with an immediate
+[`stream.push('')`][stream-push] will reset the reading state appropriately,
+however it is best to simply avoid calling `unshift()` while in the process of
+performing a read.
+
+#### readable.wrap(stream)
+
+* `stream` {Stream} An "old style" readable stream
+
+Versions of Node.js prior to v0.10 had streams that did not implement the
+entire Streams API as it is today. (See [Compatibility][] for
+more information.)
+
+If you are using an older Node.js library that emits [`'data'`][] events and
+has a [`stream.pause()`][stream-pause] method that is advisory only, then you
+can use the `wrap()` method to create a [Readable][] stream that uses the old
+stream as its data source.
+
+You will very rarely ever need to call this function, but it exists
+as a convenience for interacting with old Node.js programs and libraries.
+
+For example:
+
+```js
+const OldReader = require('./old-api-module.js').OldReader;
+const Readable = require('stream').Readable;
+const oreader = new OldReader;
+const myReader = new Readable().wrap(oreader);
+
+myReader.on('readable', () => {
+ myReader.read(); // etc.
+});
+```
+
+### Class: stream.Transform
+
+Transform streams are [Duplex][] streams where the output is in some way
+computed from the input. They implement both the [Readable][] and
+[Writable][] interfaces.
+
+Examples of Transform streams include:
+
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+
+### Class: stream.Writable
+
+
+
+The Writable stream interface is an abstraction for a *destination*
+that you are writing data *to*.
+
+Examples of writable streams include:
+
+* [HTTP requests, on the client][]
+* [HTTP responses, on the server][]
+* [fs write streams][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+* [TCP sockets][]
+* [child process stdin][]
+* [`process.stdout`][], [`process.stderr`][]
+
+#### Event: 'drain'
+
+If a [`stream.write(chunk)`][stream-write] call returns `false`, then the
+`'drain'` event will indicate when it is appropriate to begin writing more data
+to the stream.
+
+```js
+// Write the data to the supplied writable stream one million times.
+// Be attentive to back-pressure.
+function writeOneMillionTimes(writer, data, encoding, callback) {
+ var i = 1000000;
+ write();
+ function write() {
+ var ok = true;
+ do {
+ i -= 1;
+ if (i === 0) {
+ // last time!
+ writer.write(data, encoding, callback);
+ } else {
+ // see if we should continue, or wait
+ // don't pass the callback, because we're not done yet.
+ ok = writer.write(data, encoding);
+ }
+ } while (i > 0 && ok);
+ if (i > 0) {
+ // had to stop early!
+ // write some more once it drains
+ writer.once('drain', write);
+ }
+ }
+}
+```
+
+#### Event: 'error'
+
+* {Error}
+
+Emitted if there was an error when writing or piping data.
+
+#### Event: 'finish'
+
+When the [`stream.end()`][stream-end] method has been called, and all data has
+been flushed to the underlying system, this event is emitted.
+
+```javascript
+var writer = getWritableStreamSomehow();
+for (var i = 0; i < 100; i++) {
+  writer.write(`hello, #${i}!\n`);
+}
+writer.end('this is the end\n');
+writer.on('finish', () => {
+ console.error('all writes are now complete.');
+});
+```
+
+#### Event: 'pipe'
+
+* `src` {stream.Readable} source stream that is piping to this writable
+
+This is emitted whenever the [`stream.pipe()`][] method is called on a readable
+stream, adding this writable to its set of destinations.
+
+```js
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('pipe', (src) => {
+ console.error('something is piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+```
+
+#### Event: 'unpipe'
+
+* `src` {[Readable][] Stream} The source stream that
+ [unpiped][`stream.unpipe()`] this writable
+
+This is emitted whenever the [`stream.unpipe()`][] method is called on a
+readable stream, removing this writable from its set of destinations.
+
+```js
+var writer = getWritableStreamSomehow();
+var reader = getReadableStreamSomehow();
+writer.on('unpipe', (src) => {
+ console.error('something has stopped piping into the writer');
+ assert.equal(src, reader);
+});
+reader.pipe(writer);
+reader.unpipe(writer);
+```
+
+#### writable.cork()
+
+Forces buffering of all writes.
+
+Buffered data will be flushed either when [`stream.uncork()`][] or
+[`stream.end()`][stream-end] is called.
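+
+For example, a small sketch (reusing the `getWritableStreamSomehow()`
+placeholder used elsewhere in this document) that batches several small
+writes into a single flush:
+
+```js
+var writable = getWritableStreamSomehow();
+
+writable.cork();
+writable.write('header,');
+writable.write('body,');
+writable.write('footer\n');
+// Nothing has been flushed to the underlying resource yet; uncork()
+// releases the buffered chunks in one batch.
+writable.uncork();
+```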
+
+#### writable.end([chunk][, encoding][, callback])
+
+* `chunk` {String|Buffer} Optional data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Optional callback for when the stream is finished
+
+Call this method when no more data will be written to the stream. If supplied,
+the callback is attached as a listener on the [`'finish'`][] event.
+
+Calling [`stream.write()`][stream-write] after calling
+[`stream.end()`][stream-end] will raise an error.
+
+```js
+// write 'hello, ' and then end with 'world!'
+var file = fs.createWriteStream('example.txt');
+file.write('hello, ');
+file.end('world!');
+// writing more now is not allowed!
+```
+
+#### writable.setDefaultEncoding(encoding)
+
+* `encoding` {String} The new default encoding
+
+Sets the default encoding for a writable stream.
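+
+For example, a sketch (again using the `getWritableStreamSomehow()`
+placeholder) where string chunks written without an explicit encoding
+argument are decoded using the configured default:
+
+```js
+var writable = getWritableStreamSomehow();
+writable.setDefaultEncoding('utf8');
+// No per-call encoding is given, so the default ('utf8') is used to
+// decode this string into bytes.
+writable.write('héllo wörld\n');
+```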
+
+#### writable.uncork()
+
+Flushes all data buffered since the [`stream.cork()`][] call.
+
+#### writable.write(chunk[, encoding][, callback])
+
+* `chunk` {String|Buffer} The data to write
+* `encoding` {String} The encoding, if `chunk` is a String
+* `callback` {Function} Callback for when this chunk of data is flushed
+* Returns: {Boolean} `true` if the data was handled completely.
+
+This method writes some data to the underlying system, and calls the
+supplied callback once the data has been fully handled. If an error
+occurs, the callback may or may not be called with the error as its
+first argument. To detect write errors, listen for the `'error'` event.
+
+The return value indicates if you should continue writing right now.
+If the data had to be buffered internally, then it will return
+`false`. Otherwise, it will return `true`.
+
+This return value is strictly advisory. You MAY continue to write,
+even if it returns `false`. However, writes will be buffered in
+memory, so it is best not to do this excessively. Instead, wait for
+the [`'drain'`][] event before writing more data.
+
+
+## API for Stream Implementors
+
+
+
+To implement any sort of stream, the pattern is the same:
+
+1. Extend the appropriate parent class in your own subclass. (The
+ [`util.inherits()`][] method is particularly helpful for this.)
+2. Call the appropriate parent class constructor in your constructor,
+ to be sure that the internal mechanisms are set up properly.
+3. Implement one or more specific methods, as detailed below.
+
+The class to extend and the method(s) to implement depend on the sort
+of stream class you are writing:
+
+| Use-case | Class | Method(s) to implement |
+| --- | --- | --- |
+| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
+| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
+| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
+| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
+
+In your implementation code, it is very important to never call the methods
+described in [API for Stream Consumers][]. Otherwise, you can potentially cause
+adverse side effects in programs that consume your streaming interfaces.
+
+### Class: stream.Duplex
+
+
+
+A "duplex" stream is one that is both Readable and Writable, such as a TCP
+socket connection.
+
+Note that `stream.Duplex` is an abstract class designed to be extended
+with an underlying implementation of the [`stream._read(size)`][stream-_read]
+and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you
+would with a Readable or Writable stream class.
+
+Since JavaScript doesn't have multiple prototypal inheritance, this class
+prototypally inherits from Readable, and then parasitically from Writable. It is
+thus up to the user to implement both the low-level
+[`stream._read(n)`][stream-_read] method as well as the low-level
+[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension
+duplex classes.
+
+#### new stream.Duplex(options)
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors. Also has the following fields:
+ * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then
+ the stream will automatically end the readable side when the
+ writable side ends and vice versa.
+ * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
+ for readable side of the stream. Has no effect if `objectMode`
+ is `true`.
+ * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
+ for writable side of the stream. Has no effect if `objectMode`
+ is `true`.
+
+In classes that extend the Duplex class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
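+
+As a minimal sketch of forwarding these options from a subclass (the
+class name and behavior here are purely illustrative):
+
+```js
+const Duplex = require('stream').Duplex;
+const util = require('util');
+
+util.inherits(StatsDuplex, Duplex);
+
+// The writable side accepts Buffers; the readable side emits one plain
+// object per chunk written, thanks to `readableObjectMode`.
+function StatsDuplex() {
+  Duplex.call(this, { readableObjectMode: true });
+  // end the readable side once the writable side has finished
+  this.once('finish', () => this.push(null));
+}
+
+StatsDuplex.prototype._write = function(chunk, encoding, callback) {
+  this.push({ bytes: chunk.length });
+  callback();
+};
+
+StatsDuplex.prototype._read = function(size) {
+  // data is pushed from _write as it arrives, so there is nothing to
+  // do here in this toy example
+};
+```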
+
+### Class: stream.PassThrough
+
+This is a trivial implementation of a [Transform][] stream that simply
+passes the input bytes across to the output. Its purpose is mainly
+for examples and testing, but there are occasionally use cases where
+it can come in handy as a building block for novel sorts of streams.
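+
+For example, a sketch that uses a PassThrough stream to observe how many
+bytes flow through a pipeline without modifying them:
+
+```js
+const PassThrough = require('stream').PassThrough;
+
+var monitor = new PassThrough();
+var bytes = 0;
+
+monitor.on('data', (chunk) => {
+  bytes += chunk.length;
+});
+monitor.on('end', () => {
+  console.log('%d bytes passed through', bytes);
+});
+
+// e.g. source.pipe(monitor).pipe(destination);
+```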
+
+### Class: stream.Readable
+
+
+
+`stream.Readable` is an abstract class designed to be extended with an
+underlying implementation of the [`stream._read(size)`][stream-_read] method.
+
+Please see [API for Stream Consumers][] for how to consume
+streams in your programs. What follows is an explanation of how to
+implement Readable streams in your programs.
+
+#### new stream.Readable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} The maximum number of bytes to store in
+ the internal buffer before ceasing to read from the underlying
+ resource. Default = `16384` (16kb), or `16` for `objectMode` streams
+ * `encoding` {String} If specified, then buffers will be decoded to
+ strings using the specified encoding. Default = `null`
+ * `objectMode` {Boolean} Whether this stream should behave
+ as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns
+ a single value instead of a Buffer of size n. Default = `false`
+ * `read` {Function} Implementation for the [`stream._read()`][stream-_read]
+ method.
+
+In classes that extend the Readable class, make sure to call the
+Readable constructor so that the buffering settings can be properly
+initialized.
+
+#### readable.\_read(size)
+
+* `size` {Number} Number of bytes to read asynchronously
+
+Note: **Implement this method, but do NOT call it directly.**
+
+This method is prefixed with an underscore because it is internal to the
+class that defines it and should only be called by the internal Readable
+class methods. All Readable stream implementations must provide a \_read
+method to fetch data from the underlying resource.
+
+When `_read()` is called, if data is available from the resource, the `_read()`
+implementation should start pushing that data into the read queue by calling
+[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from
+the resource and pushing data until push returns `false`, at which point it
+should stop reading from the resource. Only when `_read()` is called again after
+it has stopped should it start reading more data from the resource and pushing
+that data onto the queue.
+
+Note: once the `_read()` method is called, it will not be called again until
+the [`stream.push()`][stream-push] method is called.
+
+The `size` argument is advisory. Implementations where a "read" is a
+single call that returns data can use this to know how much data to
+fetch. Implementations where that is not relevant, such as TCP or
+TLS, may ignore this argument, and simply provide data whenever it
+becomes available. There is no need, for example, to "wait" until
+`size` bytes are available before calling [`stream.push(chunk)`][stream-push].
+
+#### readable.push(chunk[, encoding])
+
+
+* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
+ Buffer encoding, such as `'utf8'` or `'ascii'`
+* return {Boolean} Whether or not more pushes should be performed
+
+Note: **This method should be called by Readable implementors, NOT
+by consumers of Readable streams.**
+
+If a value other than `null` is passed, the `push()` method adds a chunk of data
+into the queue for subsequent stream processors to consume. If `null` is
+passed, it signals the end of the stream (EOF), after which no more data
+can be written.
+
+The data added with `push()` can be pulled out by calling the
+[`stream.read()`][stream-read] method when the [`'readable'`][] event fires.
+
+This API is designed to be as flexible as possible. For example,
+you may be wrapping a lower-level source which has some sort of
+pause/resume mechanism, and a data callback. In those cases, you
+could wrap the low-level source object by doing something like this:
+
+```js
+// source is an object with readStop() and readStart() methods,
+// and an `ondata` member that gets called when it has data, and
+// an `onend` member that gets called when the data is over.
+
+util.inherits(SourceWrapper, Readable);
+
+function SourceWrapper(options) {
+ Readable.call(this, options);
+
+ this._source = getLowlevelSourceObject();
+
+ // Every time there's data, we push it into the internal buffer.
+ this._source.ondata = (chunk) => {
+ // if push() returns false, then we need to stop reading from source
+ if (!this.push(chunk))
+ this._source.readStop();
+ };
+
+ // When the source ends, we push the EOF-signaling `null` chunk
+ this._source.onend = () => {
+ this.push(null);
+ };
+}
+
+// _read will be called when the stream wants to pull more data in
+// the advisory size argument is ignored in this case.
+SourceWrapper.prototype._read = function(size) {
+ this._source.readStart();
+};
+```
+
+#### Example: A Counting Stream
+
+
+
+This is a basic example of a Readable stream. It emits the numerals
+from 1 to 1,000,000 in ascending order, and then ends.
+
+```js
+const Readable = require('stream').Readable;
+const util = require('util');
+util.inherits(Counter, Readable);
+
+function Counter(opt) {
+ Readable.call(this, opt);
+ this._max = 1000000;
+ this._index = 1;
+}
+
+Counter.prototype._read = function() {
+ var i = this._index++;
+ if (i > this._max)
+ this.push(null);
+ else {
+ var str = '' + i;
+ var buf = new Buffer(str, 'ascii');
+ this.push(buf);
+ }
+};
+```
+
+#### Example: SimpleProtocol v1 (Sub-optimal)
+
+This is similar to the `parseHeader` function described
+[here](#stream_readable_unshift_chunk), but implemented as a custom stream.
+Also, note that this implementation does not convert the incoming data to a
+string.
+
+However, this would be better implemented as a [Transform][] stream. See
+[SimpleProtocol v2][] for a better implementation.
+
+```js
+// A parser for a simple data protocol.
+// The "header" is a JSON object, followed by 2 \n characters, and
+// then a message body.
+//
+// NOTE: This can be done more simply as a Transform stream!
+// Using Readable directly for this is sub-optimal. See the
+// alternative example below under the Transform section.
+
+const Readable = require('stream').Readable;
+const util = require('util');
+
+util.inherits(SimpleProtocol, Readable);
+
+function SimpleProtocol(source, options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(source, options);
+
+ Readable.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+
+ // source is a readable stream, such as a socket or file
+ this._source = source;
+
+ source.on('end', () => {
+ this.push(null);
+ });
+
+ // give it a kick whenever the source is readable
+ // read(0) will not consume any bytes
+ source.on('readable', () => {
+ this.read(0);
+ });
+
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._read = function(n) {
+ if (!this._inBody) {
+ var chunk = this._source.read();
+
+ // if the source doesn't have data, we don't have data yet.
+ if (chunk === null)
+ return this.push('');
+
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ this.push('');
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // now, because we got some extra data, unshift the rest
+ // back into the read queue so that our consumer will see it.
+ var b = chunk.slice(split);
+ this.unshift(b);
+ // calling unshift by itself does not reset the reading state
+ // of the stream; since we're inside _read, doing an additional
+ // push('') will reset the state appropriately.
+ this.push('');
+
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+ }
+ } else {
+ // from there on, just provide the data to our consumer.
+ // careful not to push(null), since that would indicate EOF.
+ var chunk = this._source.read();
+ if (chunk) this.push(chunk);
+ }
+};
+
+// Usage:
+// var parser = new SimpleProtocol(source);
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+### Class: stream.Transform
+
+A "transform" stream is a duplex stream where the output is causally
+connected in some way to the input, such as a [zlib][] stream or a
+[crypto][] stream.
+
+There is no requirement that the output be the same size as the input,
+the same number of chunks, or arrive at the same time. For example, a
+Hash stream will only ever have a single chunk of output which is
+provided when the input is ended. A zlib stream will produce output
+that is either much smaller or much larger than its input.
+
+Rather than implement the [`stream._read()`][stream-_read] and
+[`stream._write()`][stream-_write] methods, Transform classes must implement the
+[`stream._transform()`][stream-_transform] method, and may optionally
+also implement the [`stream._flush()`][stream-_flush] method. (See below.)
+
+#### new stream.Transform([options])
+
+* `options` {Object} Passed to both Writable and Readable
+ constructors. Also has the following fields:
+ * `transform` {Function} Implementation for the
+ [`stream._transform()`][stream-_transform] method.
+ * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
+ method.
+
+In classes that extend the Transform class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### Events: 'finish' and 'end'
+
+The [`'finish'`][] and [`'end'`][] events are from the parent Writable
+and Readable classes respectively. The `'finish'` event is fired after
+[`stream.end()`][stream-end] is called and all chunks have been processed by
+[`stream._transform()`][stream-_transform], `'end'` is fired after all data has
+been output which is after the callback in [`stream._flush()`][stream-_flush]
+has been called.
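+
+As a small illustration of this ordering, using the simplified
+constructor form shown later in this document:
+
+```js
+const Transform = require('stream').Transform;
+
+var upper = new Transform({
+  transform: function(chunk, encoding, next) {
+    next(null, chunk.toString().toUpperCase());
+  }
+});
+
+upper.on('finish', () => console.log('finish: all input transformed'));
+upper.on('end', () => console.log('end: all output consumed'));
+
+upper.end('hello');  // ends the writable side, triggering 'finish'
+upper.resume();      // consume (and discard) the output so 'end' can fire
+```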
+
+#### transform.\_flush(callback)
+
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done flushing any remaining data.
+
+Note: **This function MUST NOT be called directly.** It MAY be implemented
+by child classes, and if so, will be called by the internal Transform
+class methods only.
+
+In some cases, your transform operation may need to emit a bit more
+data at the end of the stream. For example, a `Zlib` compression
+stream will store up some internal state so that it can optimally
+compress the output. At the end, however, it needs to do the best it
+can with what is left, so that the data will be complete.
+
+In those cases, you can implement a `_flush()` method, which will be
+called at the very end, after all the written data is consumed, but
+before emitting [`'end'`][] to signal the end of the readable side. Just
+like with [`stream._transform()`][stream-_transform], call
+`transform.push(chunk)` zero or more times, as appropriate, and call `callback`
+when the flush operation is complete.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
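+
+For example, a sketch of a Transform (written in the same `util.inherits`
+style as the other examples in this document) that passes data through
+unchanged, counts newline characters, and emits the total as one final
+chunk from `_flush()`:
+
+```js
+const Transform = require('stream').Transform;
+const util = require('util');
+
+util.inherits(LineCounter, Transform);
+
+function LineCounter(options) {
+  if (!(this instanceof LineCounter))
+    return new LineCounter(options);
+  Transform.call(this, options);
+  this._lines = 0;
+}
+
+LineCounter.prototype._transform = function(chunk, encoding, callback) {
+  for (var i = 0; i < chunk.length; i++) {
+    if (chunk[i] === 10) // '\n'
+      this._lines++;
+  }
+  // pass the chunk through unchanged
+  callback(null, chunk);
+};
+
+LineCounter.prototype._flush = function(callback) {
+  // emit one last chunk before 'end' is signaled
+  this.push('\ntotal lines: ' + this._lines + '\n');
+  callback();
+};
+```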
+
+#### transform.\_transform(chunk, encoding, callback)
+
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If the chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument and data) when you are done processing the supplied chunk.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Transform
+class methods only.
+
+All Transform stream implementations must provide a `_transform()`
+method to accept input and produce output.
+
+`_transform()` should do whatever has to be done in this specific
+Transform class, to handle the bytes being written, and pass them off
+to the readable portion of the interface. Do asynchronous I/O,
+process things, and so on.
+
+Call `transform.push(outputChunk)` 0 or more times to generate output
+from this input chunk, depending on how much data you want to output
+as a result of this chunk.
+
+Call the callback function only when the current chunk is completely
+consumed. Note that there may or may not be output as a result of any
+particular input chunk. If you supply a second argument to the callback,
+it will be passed to the `push()` method. In other words, the following are
+equivalent:
+
+```js
+transform.prototype._transform = function (data, encoding, callback) {
+ this.push(data);
+ callback();
+};
+
+transform.prototype._transform = function (data, encoding, callback) {
+ callback(null, data);
+};
+```
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
+
+#### Example: `SimpleProtocol` parser v2
+
+The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple
+protocol parser can be implemented simply by using the higher level
+[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol
+v1` examples.
+
+In this example, rather than providing the input as an argument, it
+would be piped into the parser, which is a more idiomatic Node.js stream
+approach.
+
+```javascript
+const util = require('util');
+const Transform = require('stream').Transform;
+util.inherits(SimpleProtocol, Transform);
+
+function SimpleProtocol(options) {
+ if (!(this instanceof SimpleProtocol))
+ return new SimpleProtocol(options);
+
+ Transform.call(this, options);
+ this._inBody = false;
+ this._sawFirstCr = false;
+ this._rawHeader = [];
+ this.header = null;
+}
+
+SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
+ if (!this._inBody) {
+ // check if the chunk has a \n\n
+ var split = -1;
+ for (var i = 0; i < chunk.length; i++) {
+ if (chunk[i] === 10) { // '\n'
+ if (this._sawFirstCr) {
+ split = i;
+ break;
+ } else {
+ this._sawFirstCr = true;
+ }
+ } else {
+ this._sawFirstCr = false;
+ }
+ }
+
+ if (split === -1) {
+ // still waiting for the \n\n
+ // stash the chunk, and try again.
+ this._rawHeader.push(chunk);
+ } else {
+ this._inBody = true;
+ var h = chunk.slice(0, split);
+ this._rawHeader.push(h);
+ var header = Buffer.concat(this._rawHeader).toString();
+ try {
+ this.header = JSON.parse(header);
+ } catch (er) {
+ this.emit('error', new Error('invalid simple protocol data'));
+ return;
+ }
+ // and let them know that we are done parsing the header.
+ this.emit('header', this.header);
+
+ // now, because we got some extra data, emit this first.
+ this.push(chunk.slice(split));
+ }
+ } else {
+ // from there on, just provide the data to our consumer as-is.
+ this.push(chunk);
+ }
+ done();
+};
+
+// Usage:
+// var parser = new SimpleProtocol();
+// source.pipe(parser)
+// Now parser is a readable stream that will emit 'header'
+// with the parsed header data.
+```
+
+### Class: stream.Writable
+
+
+
+`stream.Writable` is an abstract class designed to be extended with an
+underlying implementation of the
+[`stream._write(chunk, encoding, callback)`][stream-_write] method.
+
+Please see [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
+how to implement Writable streams in your programs.
+
+#### new stream.Writable([options])
+
+* `options` {Object}
+ * `highWaterMark` {Number} Buffer level when
+ [`stream.write()`][stream-write] starts returning `false`. Default = `16384`
+ (16kb), or `16` for `objectMode` streams.
+ * `decodeStrings` {Boolean} Whether or not to decode strings into
+ Buffers before passing them to [`stream._write()`][stream-_write].
+ Default = `true`
+ * `objectMode` {Boolean} Whether or not the
+ [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can
+ write arbitrary data instead of only `Buffer` / `String` data.
+ Default = `false`
+ * `write` {Function} Implementation for the
+ [`stream._write()`][stream-_write] method.
+ * `writev` {Function} Implementation for the
+ [`stream._writev()`][stream-_writev] method.
+
+In classes that extend the Writable class, make sure to call the
+constructor so that the buffering settings can be properly
+initialized.
+
+#### writable.\_write(chunk, encoding, callback)
+
+* `chunk` {Buffer|String} The chunk to be written. Will **always**
+ be a buffer unless the `decodeStrings` option was set to `false`.
+* `encoding` {String} If the chunk is a string, then this is the
+  encoding type. If the chunk is a buffer, then this is the special
+  value `'buffer'`; ignore it in this case.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunk.
+
+All Writable stream implementations must provide a
+[`stream._write()`][stream-_write] method to send data to the underlying
+resource.
+
+Note: **This function MUST NOT be called directly.** It should be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+Call the callback using the standard `callback(error)` pattern to
+signal that the write completed successfully or with an error.
+
+If the `decodeStrings` flag is set in the constructor options, then
+`chunk` may be a string rather than a Buffer, and `encoding` will
+indicate the sort of string that it is. This is to support
+implementations that have an optimized handling for certain string
+data encodings. If you do not explicitly set the `decodeStrings`
+option to `false`, then you can safely ignore the `encoding` argument,
+and assume that `chunk` will always be a Buffer.
+
+This method is prefixed with an underscore because it is internal to
+the class that defines it, and should not be called directly by user
+programs. However, you **are** expected to override this method in
+your own extension classes.
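+
+For example, a minimal sketch of a Writable subclass that collects
+everything written to it into an in-memory array (the class name is
+illustrative only):
+
+```js
+const Writable = require('stream').Writable;
+const util = require('util');
+
+util.inherits(MemoryWriter, Writable);
+
+function MemoryWriter(options) {
+  Writable.call(this, options);
+  this.chunks = [];
+}
+
+MemoryWriter.prototype._write = function(chunk, encoding, callback) {
+  // chunk is a Buffer unless decodeStrings was set to false
+  this.chunks.push(chunk);
+  // signal that this chunk has been fully handled
+  callback();
+};
+```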
+
+#### writable.\_writev(chunks, callback)
+
+* `chunks` {Array} The chunks to be written. Each chunk has the following
+ format: `{ chunk: ..., encoding: ... }`.
+* `callback` {Function} Call this function (optionally with an error
+ argument) when you are done processing the supplied chunks.
+
+Note: **This function MUST NOT be called directly.** It may be
+implemented by child classes, and called by the internal Writable
+class methods only.
+
+This function is completely optional to implement. In most cases it is
+unnecessary. If implemented, it will be called with all the chunks
+that are buffered in the write queue.
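+
+For example, continuing the hypothetical `MemoryWriter` sketch above and
+assuming the default `decodeStrings` behavior (so every chunk is a
+Buffer), a `_writev()` implementation could coalesce the queued chunks
+into a single entry:
+
+```js
+MemoryWriter.prototype._writev = function(chunks, callback) {
+  // each entry has the form { chunk: ..., encoding: ... }
+  var buffers = chunks.map((entry) => entry.chunk);
+  this.chunks.push(Buffer.concat(buffers));
+  callback();
+};
+```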
+
+
+## Simplified Constructor API
+
+
+
+In simple cases there is now the added benefit of being able to construct a
+stream without inheritance.
+
+This can be done by passing the appropriate methods as constructor options:
+
+Examples:
+
+### Duplex
+
+```js
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+
+ // push data onto the read queue, passing null
+ // will signal the end of the stream (EOF)
+ this.push(chunk);
+ },
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+
+ // An optional error can be passed as the first argument
+    next();
+ }
+});
+
+// or
+
+var duplex = new stream.Duplex({
+ read: function(n) {
+ // sets this._read under the hood
+
+ // push data onto the read queue, passing null
+ // will signal the end of the stream (EOF)
+ this.push(chunk);
+ },
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+
+ // An optional error can be passed as the first argument
+    next();
+ }
+});
+```
+
+### Readable
+
+```js
+var readable = new stream.Readable({
+ read: function(n) {
+ // sets this._read under the hood
+
+ // push data onto the read queue, passing null
+ // will signal the end of the stream (EOF)
+ this.push(chunk);
+ }
+});
+```
+
+### Transform
+
+```js
+var transform = new stream.Transform({
+ transform: function(chunk, encoding, next) {
+ // sets this._transform under the hood
+
+ // generate output as many times as needed
+ // this.push(chunk);
+
+ // call when the current chunk is consumed
+ next();
+ },
+ flush: function(done) {
+ // sets this._flush under the hood
+
+ // generate output as many times as needed
+ // this.push(chunk);
+
+ done();
+ }
+});
+```
+
+### Writable
+
+```js
+var writable = new stream.Writable({
+ write: function(chunk, encoding, next) {
+ // sets this._write under the hood
+
+ // An optional error can be passed as the first argument
+    next();
+ }
+});
+
+// or
+
+var writable = new stream.Writable({
+ writev: function(chunks, next) {
+ // sets this._writev under the hood
+
+ // An optional error can be passed as the first argument
+    next();
+ }
+});
+```
+
+## Streams: Under the Hood
+
+
+
+### Buffering
+
+
+
+Both Writable and Readable streams will buffer data on an internal
+object which can be retrieved from `_writableState.getBuffer()` or
+`_readableState.buffer`, respectively.
+
+The amount of data that will potentially be buffered depends on the
+`highWaterMark` option which is passed into the constructor.
+
+Buffering in Readable streams happens when the implementation calls
+[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not
+call [`stream.read()`][stream-read], then the data will sit in the internal
+queue until it is consumed.
+
+Buffering in Writable streams happens when the user calls
+[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`.
+
+The purpose of streams, especially with the [`stream.pipe()`][] method, is to
+limit the buffering of data to acceptable levels, so that sources and
+destinations of varying speed will not overwhelm the available memory.
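+
+For example, a sketch using the simplified Writable constructor (described
+earlier in this document) with a deliberately tiny `highWaterMark`, so
+that the second write already exceeds the buffer limit:
+
+```js
+const Writable = require('stream').Writable;
+
+var slow = new Writable({
+  highWaterMark: 4, // tiny buffer, for illustration only
+  write: function(chunk, encoding, next) {
+    // pretend the underlying resource is slow
+    setTimeout(next, 100);
+  }
+});
+
+console.log(slow.write('ab'));   // true: still below the high water mark
+console.log(slow.write('cdef')); // false: the buffer is now over the limit
+slow.once('drain', () => {
+  console.log('drained, safe to write again');
+});
+```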
+
+### Compatibility with Older Node.js Versions
+
+
+
+In versions of Node.js prior to v0.10, the Readable stream interface was
+simpler, but also less powerful and less useful.
+
+* Rather than waiting for you to call the [`stream.read()`][stream-read] method,
+ [`'data'`][] events would start emitting immediately. If you needed to do
+ some I/O to decide how to handle data, then you had to store the chunks
+ in some kind of buffer so that they would not be lost.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+ guaranteed. This meant that you still had to be prepared to receive
+ [`'data'`][] events even when the stream was in a paused state.
+
+In Node.js v0.10, the [Readable][] class was added.
+For backwards compatibility with older Node.js programs, Readable streams
+switch into "flowing mode" when a [`'data'`][] event handler is added, or
+when the [`stream.resume()`][stream-resume] method is called. The effect is
+that, even if you are not using the new [`stream.read()`][stream-read] method
+and [`'readable'`][] event, you no longer have to worry about losing
+[`'data'`][] chunks.
+
+Most programs will continue to function normally. However, this
+introduces an edge case in the following conditions:
+
+* No [`'data'`][] event handler is added.
+* The [`stream.resume()`][stream-resume] method is never called.
+* The stream is not piped to any writable destination.
+
+For example, consider the following code:
+
+```js
+// WARNING! BROKEN!
+net.createServer((socket) => {
+
+ // we add an 'end' method, but never consume the data
+ socket.on('end', () => {
+ // It will never get here.
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+}).listen(1337);
+```
+
+In versions of Node.js prior to v0.10, the incoming message data would be
+simply discarded. However, in Node.js v0.10 and beyond,
+the socket will remain paused forever.
+
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to start the flow of data:
+
+```js
+// Workaround
+net.createServer((socket) => {
+
+ socket.on('end', () => {
+ socket.end('I got your message (but didnt read it)\n');
+ });
+
+ // start the flow of data, discarding it.
+ socket.resume();
+
+}).listen(1337);
+```
+
+In addition to new Readable streams switching into flowing mode,
+pre-v0.10 style streams can be wrapped in a Readable class using the
+[`stream.wrap()`][] method.
+
+
+### Object Mode
+
+
+
+Normally, Streams operate on Strings and Buffers exclusively.
+
+Streams that are in **object mode** can emit generic JavaScript values
+other than Buffers and Strings.
+
+A Readable stream in object mode will always return a single item from
+a call to [`stream.read(size)`][stream-read], regardless of what the size
+argument is.
+
+A Writable stream in object mode will always ignore the `encoding`
+argument to [`stream.write(data, encoding)`][stream-write].
+
+The special value `null` still retains its special value for object
+mode streams. That is, for object mode readable streams, `null` as a
+return value from [`stream.read()`][stream-read] indicates that there is no more
+data, and [`stream.push(null)`][stream-push] will signal the end of stream data
+(`EOF`).
+
+No streams in Node.js core are object mode streams. This pattern is only
+used by userland streaming libraries.
+
+You should set `objectMode` in your stream child class constructor on
+the options object. Setting `objectMode` mid-stream is not safe.
+
+For Duplex streams `objectMode` can be set exclusively for readable or
+writable side with `readableObjectMode` and `writableObjectMode`
+respectively. These options can be used to implement parsers and
+serializers with Transform streams.
+
+```js
+const util = require('util');
+const StringDecoder = require('string_decoder').StringDecoder;
+const Transform = require('stream').Transform;
+util.inherits(JSONParseStream, Transform);
+
+// Gets \n-delimited JSON string data, and emits the parsed objects
+function JSONParseStream() {
+ if (!(this instanceof JSONParseStream))
+ return new JSONParseStream();
+
+ Transform.call(this, { readableObjectMode : true });
+
+ this._buffer = '';
+ this._decoder = new StringDecoder('utf8');
+}
+
+JSONParseStream.prototype._transform = function(chunk, encoding, cb) {
+ this._buffer += this._decoder.write(chunk);
+ // split on newlines
+ var lines = this._buffer.split(/\r?\n/);
+ // keep the last partial line buffered
+ this._buffer = lines.pop();
+ for (var l = 0; l < lines.length; l++) {
+ var line = lines[l];
+ try {
+ var obj = JSON.parse(line);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+
+JSONParseStream.prototype._flush = function(cb) {
+ // Just handle any leftover
+ var rem = this._buffer.trim();
+ if (rem) {
+ try {
+ var obj = JSON.parse(rem);
+ } catch (er) {
+ this.emit('error', er);
+ return;
+ }
+ // push the parsed object out to the readable consumer
+ this.push(obj);
+ }
+ cb();
+};
+```
+
+### `stream.read(0)`
+
+There are some cases where you want to trigger a refresh of the
+underlying readable stream mechanisms, without actually consuming any
+data. In that case, you can call `stream.read(0)`, which will always
+return null.
+
+If the internal read buffer is below the `highWaterMark`, and the
+stream is not currently reading, then calling `stream.read(0)` will trigger
+a low-level [`stream._read()`][stream-_read] call.
+
+There is almost never a need to do this. However, you will see some
+cases in Node.js's internals where this is done, particularly in the
+Readable stream class internals.
+
+### `stream.push('')`
+
+Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
+interesting side effect. Because it *is* a call to
+[`stream.push()`][stream-push], it will end the `reading` process. However, it
+does *not* add any data to the readable buffer, so there's nothing for
+a user to consume.
+
+Very rarely, there are cases where you have no data to provide now,
+but the consumer of your stream (or, perhaps, another bit of your own
+code) will know when to check again, by calling [`stream.read(0)`][stream-read].
+In those cases, you *may* call `stream.push('')`.
+
+So far, the only use case for this functionality is in the
+[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. If you
+find that you have to use `stream.push('')`, please consider another
+approach, because it almost certainly indicates that something is
+horribly wrong.
+
+[`'data'`]: #stream_event_data
+[`'drain'`]: #stream_event_drain
+[`'end'`]: #stream_event_end
+[`'finish'`]: #stream_event_finish
+[`'readable'`]: #stream_event_readable
+[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.9.1/api/buffer.html#buffer_buf_tostring_encoding_start_end
+[`EventEmitter`]: https://nodejs.org/docs/v5.9.1/api/events.html#events_class_eventemitter
+[`process.stderr`]: https://nodejs.org/docs/v5.9.1/api/process.html#process_process_stderr
+[`process.stdin`]: https://nodejs.org/docs/v5.9.1/api/process.html#process_process_stdin
+[`process.stdout`]: https://nodejs.org/docs/v5.9.1/api/process.html#process_process_stdout
+[`stream.cork()`]: #stream_writable_cork
+[`stream.pipe()`]: #stream_readable_pipe_destination_options
+[`stream.uncork()`]: #stream_writable_uncork
+[`stream.unpipe()`]: #stream_readable_unpipe_destination
+[`stream.wrap()`]: #stream_readable_wrap_stream
+[`tls.CryptoStream`]: https://nodejs.org/docs/v5.9.1/api/tls.html#tls_class_cryptostream
+[`util.inherits()`]: https://nodejs.org/docs/v5.9.1/api/util.html#util_util_inherits_constructor_superconstructor
+[API for Stream Consumers]: #stream_api_for_stream_consumers
+[API for Stream Implementors]: #stream_api_for_stream_implementors
+[child process stdin]: https://nodejs.org/docs/v5.9.1/api/child_process.html#child_process_child_stdin
+[child process stdout and stderr]: https://nodejs.org/docs/v5.9.1/api/child_process.html#child_process_child_stdout
+[Compatibility]: #stream_compatibility_with_older_node_js_versions
+[crypto]: crypto.html
+[Duplex]: #stream_class_stream_duplex
+[fs read streams]: https://nodejs.org/docs/v5.9.1/api/fs.html#fs_class_fs_readstream
+[fs write streams]: https://nodejs.org/docs/v5.9.1/api/fs.html#fs_class_fs_writestream
+[HTTP requests, on the client]: https://nodejs.org/docs/v5.9.1/api/http.html#http_class_http_clientrequest
+[HTTP responses, on the server]: https://nodejs.org/docs/v5.9.1/api/http.html#http_class_http_serverresponse
+[http-incoming-message]: https://nodejs.org/docs/v5.9.1/api/http.html#http_class_http_incomingmessage
+[Object mode]: #stream_object_mode
+[Readable]: #stream_class_stream_readable
+[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
+[stream-_flush]: #stream_transform_flush_callback
+[stream-_read]: #stream_readable_read_size_1
+[stream-_transform]: #stream_transform_transform_chunk_encoding_callback
+[stream-_write]: #stream_writable_write_chunk_encoding_callback_1
+[stream-_writev]: #stream_writable_writev_chunks_callback
+[stream-end]: #stream_writable_end_chunk_encoding_callback
+[stream-pause]: #stream_readable_pause
+[stream-push]: #stream_readable_push_chunk_encoding
+[stream-read]: #stream_readable_read_size
+[stream-resume]: #stream_readable_resume
+[stream-write]: #stream_writable_write_chunk_encoding_callback
+[TCP sockets]: https://nodejs.org/docs/v5.9.1/api/net.html#net_class_net_socket
+[Transform]: #stream_class_stream_transform
+[Writable]: #stream_class_stream_writable
+[zlib]: zlib.html
diff --git a/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
new file mode 100644
index 00000000000000..c141a99c26c638
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
@@ -0,0 +1,58 @@
+# streams WG Meeting 2015-01-30
+
+## Links
+
+* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
+* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
+* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
+
+## Agenda
+
+Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
+
+* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
+* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
+* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
+* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
+
+## Minutes
+
+### adopt a charter
+
+* group: +1's all around
+
+### What versioning scheme should be adopted?
+* group: +1’s 3.0.0
+* domenic+group: pulling in patches from other sources where appropriate
+* mikeal: version independently, suggesting versions for io.js
+* mikeal+domenic: work with TC to notify in advance of changes
+
+### streamline creation of streams
+* sam: streamline creation of streams
+* domenic: nice simple solution posted
+ but, we lose the opportunity to change the model
+ may not be backwards incompatible (double check keys)
+
+ **action item:** domenic will check
+
+### remove implicit flowing of streams on(‘data’)
+* add isFlowing / isPaused
+* mikeal: worrying that we’re documenting polyfill methods – confuses users
+* domenic: more reflective API is probably good, with warning labels for users
+* new section for mad scientists (reflective stream access)
+* calvin: name the “third state”
+* mikeal: maybe borrow the name from whatwg?
+* domenic: we’re missing the “third state”
+* consensus: kind of difficult to name the third state
+* mikeal: figure out differences in states / compat
+* mathias: always flow on data – eliminates third state
+ * explore what it breaks
+
+**action items:**
+* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
+* ask rod/build for infrastructure
+* **chris**: explore the “flow on data” approach
+* add isPaused/isFlowing
+* add new docs section
+* move isPaused to that section
diff --git a/deps/npm/node_modules/readable-stream/float.patch b/deps/npm/node_modules/readable-stream/float.patch
deleted file mode 100644
index 7abb6dc30b21bf..00000000000000
--- a/deps/npm/node_modules/readable-stream/float.patch
+++ /dev/null
@@ -1,922 +0,0 @@
-diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js
-index c5a741c..a2e0d8e 100644
---- a/lib/_stream_duplex.js
-+++ b/lib/_stream_duplex.js
-@@ -26,8 +26,8 @@
-
- module.exports = Duplex;
- var util = require('util');
--var Readable = require('_stream_readable');
--var Writable = require('_stream_writable');
-+var Readable = require('./_stream_readable');
-+var Writable = require('./_stream_writable');
-
- util.inherits(Duplex, Readable);
-
-diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
-index a5e9864..330c247 100644
---- a/lib/_stream_passthrough.js
-+++ b/lib/_stream_passthrough.js
-@@ -25,7 +25,7 @@
-
- module.exports = PassThrough;
-
--var Transform = require('_stream_transform');
-+var Transform = require('./_stream_transform');
- var util = require('util');
- util.inherits(PassThrough, Transform);
-
-diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
-index 0c3fe3e..90a8298 100644
---- a/lib/_stream_readable.js
-+++ b/lib/_stream_readable.js
-@@ -23,10 +23,34 @@ module.exports = Readable;
- Readable.ReadableState = ReadableState;
-
- var EE = require('events').EventEmitter;
-+if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
-+ return emitter.listeners(type).length;
-+};
-+
-+if (!global.setImmediate) global.setImmediate = function setImmediate(fn) {
-+ return setTimeout(fn, 0);
-+};
-+if (!global.clearImmediate) global.clearImmediate = function clearImmediate(i) {
-+ return clearTimeout(i);
-+};
-+
- var Stream = require('stream');
- var util = require('util');
-+if (!util.isUndefined) {
-+ var utilIs = require('core-util-is');
-+ for (var f in utilIs) {
-+ util[f] = utilIs[f];
-+ }
-+}
- var StringDecoder;
--var debug = util.debuglog('stream');
-+var debug;
-+if (util.debuglog)
-+ debug = util.debuglog('stream');
-+else try {
-+ debug = require('debuglog')('stream');
-+} catch (er) {
-+ debug = function() {};
-+}
-
- util.inherits(Readable, Stream);
-
-@@ -380,7 +404,7 @@ function chunkInvalid(state, chunk) {
-
-
- function onEofChunk(stream, state) {
-- if (state.decoder && !state.ended) {
-+ if (state.decoder && !state.ended && state.decoder.end) {
- var chunk = state.decoder.end();
- if (chunk && chunk.length) {
- state.buffer.push(chunk);
-diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js
-index b1f9fcc..b0caf57 100644
---- a/lib/_stream_transform.js
-+++ b/lib/_stream_transform.js
-@@ -64,8 +64,14 @@
-
- module.exports = Transform;
-
--var Duplex = require('_stream_duplex');
-+var Duplex = require('./_stream_duplex');
- var util = require('util');
-+if (!util.isUndefined) {
-+ var utilIs = require('core-util-is');
-+ for (var f in utilIs) {
-+ util[f] = utilIs[f];
-+ }
-+}
- util.inherits(Transform, Duplex);
-
-
-diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
-index ba2e920..f49288b 100644
---- a/lib/_stream_writable.js
-+++ b/lib/_stream_writable.js
-@@ -27,6 +27,12 @@ module.exports = Writable;
- Writable.WritableState = WritableState;
-
- var util = require('util');
-+if (!util.isUndefined) {
-+ var utilIs = require('core-util-is');
-+ for (var f in utilIs) {
-+ util[f] = utilIs[f];
-+ }
-+}
- var Stream = require('stream');
-
- util.inherits(Writable, Stream);
-@@ -119,7 +125,7 @@ function WritableState(options, stream) {
- function Writable(options) {
- // Writable ctor is applied to Duplexes, though they're not
- // instanceof Writable, they're instanceof Readable.
-- if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
-+ if (!(this instanceof Writable) && !(this instanceof require('./_stream_duplex')))
- return new Writable(options);
-
- this._writableState = new WritableState(options, this);
-diff --git a/test/simple/test-stream-big-push.js b/test/simple/test-stream-big-push.js
-index e3787e4..8cd2127 100644
---- a/test/simple/test-stream-big-push.js
-+++ b/test/simple/test-stream-big-push.js
-@@ -21,7 +21,7 @@
-
- var common = require('../common');
- var assert = require('assert');
--var stream = require('stream');
-+var stream = require('../../');
- var str = 'asdfasdfasdfasdfasdf';
-
- var r = new stream.Readable({
-diff --git a/test/simple/test-stream-end-paused.js b/test/simple/test-stream-end-paused.js
-index bb73777..d40efc7 100644
---- a/test/simple/test-stream-end-paused.js
-+++ b/test/simple/test-stream-end-paused.js
-@@ -25,7 +25,7 @@ var gotEnd = false;
-
- // Make sure we don't miss the end event for paused 0-length streams
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
- var stream = new Readable();
- var calledRead = false;
- stream._read = function() {
-diff --git a/test/simple/test-stream-pipe-after-end.js b/test/simple/test-stream-pipe-after-end.js
-index b46ee90..0be8366 100644
---- a/test/simple/test-stream-pipe-after-end.js
-+++ b/test/simple/test-stream-pipe-after-end.js
-@@ -22,8 +22,8 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('_stream_readable');
--var Writable = require('_stream_writable');
-+var Readable = require('../../lib/_stream_readable');
-+var Writable = require('../../lib/_stream_writable');
- var util = require('util');
-
- util.inherits(TestReadable, Readable);
-diff --git a/test/simple/test-stream-pipe-cleanup.js b/test/simple/test-stream-pipe-cleanup.js
-deleted file mode 100644
-index f689358..0000000
---- a/test/simple/test-stream-pipe-cleanup.js
-+++ /dev/null
-@@ -1,122 +0,0 @@
--// Copyright Joyent, Inc. and other Node contributors.
--//
--// Permission is hereby granted, free of charge, to any person obtaining a
--// copy of this software and associated documentation files (the
--// "Software"), to deal in the Software without restriction, including
--// without limitation the rights to use, copy, modify, merge, publish,
--// distribute, sublicense, and/or sell copies of the Software, and to permit
--// persons to whom the Software is furnished to do so, subject to the
--// following conditions:
--//
--// The above copyright notice and this permission notice shall be included
--// in all copies or substantial portions of the Software.
--//
--// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
--// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
--// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
--// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
--// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
--// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
--// USE OR OTHER DEALINGS IN THE SOFTWARE.
--
--// This test asserts that Stream.prototype.pipe does not leave listeners
--// hanging on the source or dest.
--
--var common = require('../common');
--var stream = require('stream');
--var assert = require('assert');
--var util = require('util');
--
--function Writable() {
-- this.writable = true;
-- this.endCalls = 0;
-- stream.Stream.call(this);
--}
--util.inherits(Writable, stream.Stream);
--Writable.prototype.end = function() {
-- this.endCalls++;
--};
--
--Writable.prototype.destroy = function() {
-- this.endCalls++;
--};
--
--function Readable() {
-- this.readable = true;
-- stream.Stream.call(this);
--}
--util.inherits(Readable, stream.Stream);
--
--function Duplex() {
-- this.readable = true;
-- Writable.call(this);
--}
--util.inherits(Duplex, Writable);
--
--var i = 0;
--var limit = 100;
--
--var w = new Writable();
--
--var r;
--
--for (i = 0; i < limit; i++) {
-- r = new Readable();
-- r.pipe(w);
-- r.emit('end');
--}
--assert.equal(0, r.listeners('end').length);
--assert.equal(limit, w.endCalls);
--
--w.endCalls = 0;
--
--for (i = 0; i < limit; i++) {
-- r = new Readable();
-- r.pipe(w);
-- r.emit('close');
--}
--assert.equal(0, r.listeners('close').length);
--assert.equal(limit, w.endCalls);
--
--w.endCalls = 0;
--
--r = new Readable();
--
--for (i = 0; i < limit; i++) {
-- w = new Writable();
-- r.pipe(w);
-- w.emit('close');
--}
--assert.equal(0, w.listeners('close').length);
--
--r = new Readable();
--w = new Writable();
--var d = new Duplex();
--r.pipe(d); // pipeline A
--d.pipe(w); // pipeline B
--assert.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
--assert.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
--assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
--assert.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
--assert.equal(w.listeners('end').length, 0);
--assert.equal(w.listeners('close').length, 1); // B.cleanup
--
--r.emit('end');
--assert.equal(d.endCalls, 1);
--assert.equal(w.endCalls, 0);
--assert.equal(r.listeners('end').length, 0);
--assert.equal(r.listeners('close').length, 0);
--assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
--assert.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
--assert.equal(w.listeners('end').length, 0);
--assert.equal(w.listeners('close').length, 1); // B.cleanup
--
--d.emit('end');
--assert.equal(d.endCalls, 1);
--assert.equal(w.endCalls, 1);
--assert.equal(r.listeners('end').length, 0);
--assert.equal(r.listeners('close').length, 0);
--assert.equal(d.listeners('end').length, 0);
--assert.equal(d.listeners('close').length, 0);
--assert.equal(w.listeners('end').length, 0);
--assert.equal(w.listeners('close').length, 0);
-diff --git a/test/simple/test-stream-pipe-error-handling.js b/test/simple/test-stream-pipe-error-handling.js
-index c5d724b..c7d6b7d 100644
---- a/test/simple/test-stream-pipe-error-handling.js
-+++ b/test/simple/test-stream-pipe-error-handling.js
-@@ -21,7 +21,7 @@
-
- var common = require('../common');
- var assert = require('assert');
--var Stream = require('stream').Stream;
-+var Stream = require('../../').Stream;
-
- (function testErrorListenerCatches() {
- var source = new Stream();
-diff --git a/test/simple/test-stream-pipe-event.js b/test/simple/test-stream-pipe-event.js
-index cb9d5fe..56f8d61 100644
---- a/test/simple/test-stream-pipe-event.js
-+++ b/test/simple/test-stream-pipe-event.js
-@@ -20,7 +20,7 @@
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- var common = require('../common');
--var stream = require('stream');
-+var stream = require('../../');
- var assert = require('assert');
- var util = require('util');
-
-diff --git a/test/simple/test-stream-push-order.js b/test/simple/test-stream-push-order.js
-index f2e6ec2..a5c9bf9 100644
---- a/test/simple/test-stream-push-order.js
-+++ b/test/simple/test-stream-push-order.js
-@@ -20,7 +20,7 @@
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- var common = require('../common.js');
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
- var assert = require('assert');
-
- var s = new Readable({
-diff --git a/test/simple/test-stream-push-strings.js b/test/simple/test-stream-push-strings.js
-index 06f43dc..1701a9a 100644
---- a/test/simple/test-stream-push-strings.js
-+++ b/test/simple/test-stream-push-strings.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
- var util = require('util');
-
- util.inherits(MyStream, Readable);
-diff --git a/test/simple/test-stream-readable-event.js b/test/simple/test-stream-readable-event.js
-index ba6a577..a8e6f7b 100644
---- a/test/simple/test-stream-readable-event.js
-+++ b/test/simple/test-stream-readable-event.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
-
- (function first() {
- // First test, not reading when the readable is added.
-diff --git a/test/simple/test-stream-readable-flow-recursion.js b/test/simple/test-stream-readable-flow-recursion.js
-index 2891ad6..11689ba 100644
---- a/test/simple/test-stream-readable-flow-recursion.js
-+++ b/test/simple/test-stream-readable-flow-recursion.js
-@@ -27,7 +27,7 @@ var assert = require('assert');
- // more data continuously, but without triggering a nextTick
- // warning or RangeError.
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
-
- // throw an error if we trigger a nextTick warning.
- process.throwDeprecation = true;
-diff --git a/test/simple/test-stream-unshift-empty-chunk.js b/test/simple/test-stream-unshift-empty-chunk.js
-index 0c96476..7827538 100644
---- a/test/simple/test-stream-unshift-empty-chunk.js
-+++ b/test/simple/test-stream-unshift-empty-chunk.js
-@@ -24,7 +24,7 @@ var assert = require('assert');
-
- // This test verifies that stream.unshift(Buffer(0)) or
- // stream.unshift('') does not set state.reading=false.
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
-
- var r = new Readable();
- var nChunks = 10;
-diff --git a/test/simple/test-stream-unshift-read-race.js b/test/simple/test-stream-unshift-read-race.js
-index 83fd9fa..17c18aa 100644
---- a/test/simple/test-stream-unshift-read-race.js
-+++ b/test/simple/test-stream-unshift-read-race.js
-@@ -29,7 +29,7 @@ var assert = require('assert');
- // 3. push() after the EOF signaling null is an error.
- // 4. _read() is not called after pushing the EOF null chunk.
-
--var stream = require('stream');
-+var stream = require('../../');
- var hwm = 10;
- var r = stream.Readable({ highWaterMark: hwm });
- var chunks = 10;
-@@ -51,7 +51,14 @@ r._read = function(n) {
-
- function push(fast) {
- assert(!pushedNull, 'push() after null push');
-- var c = pos >= data.length ? null : data.slice(pos, pos + n);
-+ var c;
-+ if (pos >= data.length)
-+ c = null;
-+ else {
-+ if (n + pos > data.length)
-+ n = data.length - pos;
-+ c = data.slice(pos, pos + n);
-+ }
- pushedNull = c === null;
- if (fast) {
- pos += n;
-diff --git a/test/simple/test-stream-writev.js b/test/simple/test-stream-writev.js
-index 5b49e6e..b5321f3 100644
---- a/test/simple/test-stream-writev.js
-+++ b/test/simple/test-stream-writev.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var stream = require('stream');
-+var stream = require('../../');
-
- var queue = [];
- for (var decode = 0; decode < 2; decode++) {
-diff --git a/test/simple/test-stream2-basic.js b/test/simple/test-stream2-basic.js
-index 3814bf0..248c1be 100644
---- a/test/simple/test-stream2-basic.js
-+++ b/test/simple/test-stream2-basic.js
-@@ -21,7 +21,7 @@
-
-
- var common = require('../common.js');
--var R = require('_stream_readable');
-+var R = require('../../lib/_stream_readable');
- var assert = require('assert');
-
- var util = require('util');
-diff --git a/test/simple/test-stream2-compatibility.js b/test/simple/test-stream2-compatibility.js
-index 6cdd4e9..f0fa84b 100644
---- a/test/simple/test-stream2-compatibility.js
-+++ b/test/simple/test-stream2-compatibility.js
-@@ -21,7 +21,7 @@
-
-
- var common = require('../common.js');
--var R = require('_stream_readable');
-+var R = require('../../lib/_stream_readable');
- var assert = require('assert');
-
- var util = require('util');
-diff --git a/test/simple/test-stream2-finish-pipe.js b/test/simple/test-stream2-finish-pipe.js
-index 39b274f..006a19b 100644
---- a/test/simple/test-stream2-finish-pipe.js
-+++ b/test/simple/test-stream2-finish-pipe.js
-@@ -20,7 +20,7 @@
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- var common = require('../common.js');
--var stream = require('stream');
-+var stream = require('../../');
- var Buffer = require('buffer').Buffer;
-
- var r = new stream.Readable();
-diff --git a/test/simple/test-stream2-fs.js b/test/simple/test-stream2-fs.js
-deleted file mode 100644
-index e162406..0000000
---- a/test/simple/test-stream2-fs.js
-+++ /dev/null
-@@ -1,72 +0,0 @@
--// Copyright Joyent, Inc. and other Node contributors.
--//
--// Permission is hereby granted, free of charge, to any person obtaining a
--// copy of this software and associated documentation files (the
--// "Software"), to deal in the Software without restriction, including
--// without limitation the rights to use, copy, modify, merge, publish,
--// distribute, sublicense, and/or sell copies of the Software, and to permit
--// persons to whom the Software is furnished to do so, subject to the
--// following conditions:
--//
--// The above copyright notice and this permission notice shall be included
--// in all copies or substantial portions of the Software.
--//
--// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
--// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
--// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
--// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
--// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
--// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
--// USE OR OTHER DEALINGS IN THE SOFTWARE.
--
--
--var common = require('../common.js');
--var R = require('_stream_readable');
--var assert = require('assert');
--
--var fs = require('fs');
--var FSReadable = fs.ReadStream;
--
--var path = require('path');
--var file = path.resolve(common.fixturesDir, 'x1024.txt');
--
--var size = fs.statSync(file).size;
--
--var expectLengths = [1024];
--
--var util = require('util');
--var Stream = require('stream');
--
--util.inherits(TestWriter, Stream);
--
--function TestWriter() {
-- Stream.apply(this);
-- this.buffer = [];
-- this.length = 0;
--}
--
--TestWriter.prototype.write = function(c) {
-- this.buffer.push(c.toString());
-- this.length += c.length;
-- return true;
--};
--
--TestWriter.prototype.end = function(c) {
-- if (c) this.buffer.push(c.toString());
-- this.emit('results', this.buffer);
--}
--
--var r = new FSReadable(file);
--var w = new TestWriter();
--
--w.on('results', function(res) {
-- console.error(res, w.length);
-- assert.equal(w.length, size);
-- var l = 0;
-- assert.deepEqual(res.map(function (c) {
-- return c.length;
-- }), expectLengths);
-- console.log('ok');
--});
--
--r.pipe(w);
-diff --git a/test/simple/test-stream2-httpclient-response-end.js b/test/simple/test-stream2-httpclient-response-end.js
-deleted file mode 100644
-index 15cffc2..0000000
---- a/test/simple/test-stream2-httpclient-response-end.js
-+++ /dev/null
-@@ -1,52 +0,0 @@
--// Copyright Joyent, Inc. and other Node contributors.
--//
--// Permission is hereby granted, free of charge, to any person obtaining a
--// copy of this software and associated documentation files (the
--// "Software"), to deal in the Software without restriction, including
--// without limitation the rights to use, copy, modify, merge, publish,
--// distribute, sublicense, and/or sell copies of the Software, and to permit
--// persons to whom the Software is furnished to do so, subject to the
--// following conditions:
--//
--// The above copyright notice and this permission notice shall be included
--// in all copies or substantial portions of the Software.
--//
--// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
--// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
--// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
--// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
--// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
--// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
--// USE OR OTHER DEALINGS IN THE SOFTWARE.
--
--var common = require('../common.js');
--var assert = require('assert');
--var http = require('http');
--var msg = 'Hello';
--var readable_event = false;
--var end_event = false;
--var server = http.createServer(function(req, res) {
-- res.writeHead(200, {'Content-Type': 'text/plain'});
-- res.end(msg);
--}).listen(common.PORT, function() {
-- http.get({port: common.PORT}, function(res) {
-- var data = '';
-- res.on('readable', function() {
-- console.log('readable event');
-- readable_event = true;
-- data += res.read();
-- });
-- res.on('end', function() {
-- console.log('end event');
-- end_event = true;
-- assert.strictEqual(msg, data);
-- server.close();
-- });
-- });
--});
--
--process.on('exit', function() {
-- assert(readable_event);
-- assert(end_event);
--});
--
-diff --git a/test/simple/test-stream2-large-read-stall.js b/test/simple/test-stream2-large-read-stall.js
-index 2fbfbca..667985b 100644
---- a/test/simple/test-stream2-large-read-stall.js
-+++ b/test/simple/test-stream2-large-read-stall.js
-@@ -30,7 +30,7 @@ var PUSHSIZE = 20;
- var PUSHCOUNT = 1000;
- var HWM = 50;
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
- var r = new Readable({
- highWaterMark: HWM
- });
-@@ -39,23 +39,23 @@ var rs = r._readableState;
- r._read = push;
-
- r.on('readable', function() {
-- console.error('>> readable');
-+ //console.error('>> readable');
- do {
-- console.error(' > read(%d)', READSIZE);
-+ //console.error(' > read(%d)', READSIZE);
- var ret = r.read(READSIZE);
-- console.error(' < %j (%d remain)', ret && ret.length, rs.length);
-+ //console.error(' < %j (%d remain)', ret && ret.length, rs.length);
- } while (ret && ret.length === READSIZE);
-
-- console.error('<< after read()',
-- ret && ret.length,
-- rs.needReadable,
-- rs.length);
-+ //console.error('<< after read()',
-+ // ret && ret.length,
-+ // rs.needReadable,
-+ // rs.length);
- });
-
- var endEmitted = false;
- r.on('end', function() {
- endEmitted = true;
-- console.error('end');
-+ //console.error('end');
- });
-
- var pushes = 0;
-@@ -64,11 +64,11 @@ function push() {
- return;
-
- if (pushes++ === PUSHCOUNT) {
-- console.error(' push(EOF)');
-+ //console.error(' push(EOF)');
- return r.push(null);
- }
-
-- console.error(' push #%d', pushes);
-+ //console.error(' push #%d', pushes);
- if (r.push(new Buffer(PUSHSIZE)))
- setTimeout(push);
- }
-diff --git a/test/simple/test-stream2-objects.js b/test/simple/test-stream2-objects.js
-index 3e6931d..ff47d89 100644
---- a/test/simple/test-stream2-objects.js
-+++ b/test/simple/test-stream2-objects.js
-@@ -21,8 +21,8 @@
-
-
- var common = require('../common.js');
--var Readable = require('_stream_readable');
--var Writable = require('_stream_writable');
-+var Readable = require('../../lib/_stream_readable');
-+var Writable = require('../../lib/_stream_writable');
- var assert = require('assert');
-
- // tiny node-tap lookalike.
-diff --git a/test/simple/test-stream2-pipe-error-handling.js b/test/simple/test-stream2-pipe-error-handling.js
-index cf7531c..e3f3e4e 100644
---- a/test/simple/test-stream2-pipe-error-handling.js
-+++ b/test/simple/test-stream2-pipe-error-handling.js
-@@ -21,7 +21,7 @@
-
- var common = require('../common');
- var assert = require('assert');
--var stream = require('stream');
-+var stream = require('../../');
-
- (function testErrorListenerCatches() {
- var count = 1000;
-diff --git a/test/simple/test-stream2-pipe-error-once-listener.js b/test/simple/test-stream2-pipe-error-once-listener.js
-index 5e8e3cb..53b2616 100755
---- a/test/simple/test-stream2-pipe-error-once-listener.js
-+++ b/test/simple/test-stream2-pipe-error-once-listener.js
-@@ -24,7 +24,7 @@ var common = require('../common.js');
- var assert = require('assert');
-
- var util = require('util');
--var stream = require('stream');
-+var stream = require('../../');
-
-
- var Read = function() {
-diff --git a/test/simple/test-stream2-push.js b/test/simple/test-stream2-push.js
-index b63edc3..eb2b0e9 100644
---- a/test/simple/test-stream2-push.js
-+++ b/test/simple/test-stream2-push.js
-@@ -20,7 +20,7 @@
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- var common = require('../common.js');
--var stream = require('stream');
-+var stream = require('../../');
- var Readable = stream.Readable;
- var Writable = stream.Writable;
- var assert = require('assert');
-diff --git a/test/simple/test-stream2-read-sync-stack.js b/test/simple/test-stream2-read-sync-stack.js
-index e8a7305..9740a47 100644
---- a/test/simple/test-stream2-read-sync-stack.js
-+++ b/test/simple/test-stream2-read-sync-stack.js
-@@ -21,7 +21,7 @@
-
- var common = require('../common');
- var assert = require('assert');
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
- var r = new Readable();
- var N = 256 * 1024;
-
-diff --git a/test/simple/test-stream2-readable-empty-buffer-no-eof.js b/test/simple/test-stream2-readable-empty-buffer-no-eof.js
-index cd30178..4b1659d 100644
---- a/test/simple/test-stream2-readable-empty-buffer-no-eof.js
-+++ b/test/simple/test-stream2-readable-empty-buffer-no-eof.js
-@@ -22,10 +22,9 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('stream').Readable;
-+var Readable = require('../../').Readable;
-
- test1();
--test2();
-
- function test1() {
- var r = new Readable();
-@@ -88,31 +87,3 @@ function test1() {
- console.log('ok');
- });
- }
--
--function test2() {
-- var r = new Readable({ encoding: 'base64' });
-- var reads = 5;
-- r._read = function(n) {
-- if (!reads--)
-- return r.push(null); // EOF
-- else
-- return r.push(new Buffer('x'));
-- };
--
-- var results = [];
-- function flow() {
-- var chunk;
-- while (null !== (chunk = r.read()))
-- results.push(chunk + '');
-- }
-- r.on('readable', flow);
-- r.on('end', function() {
-- results.push('EOF');
-- });
-- flow();
--
-- process.on('exit', function() {
-- assert.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]);
-- console.log('ok');
-- });
--}
-diff --git a/test/simple/test-stream2-readable-from-list.js b/test/simple/test-stream2-readable-from-list.js
-index 7c96ffe..04a96f5 100644
---- a/test/simple/test-stream2-readable-from-list.js
-+++ b/test/simple/test-stream2-readable-from-list.js
-@@ -21,7 +21,7 @@
-
- var assert = require('assert');
- var common = require('../common.js');
--var fromList = require('_stream_readable')._fromList;
-+var fromList = require('../../lib/_stream_readable')._fromList;
-
- // tiny node-tap lookalike.
- var tests = [];
-diff --git a/test/simple/test-stream2-readable-legacy-drain.js b/test/simple/test-stream2-readable-legacy-drain.js
-index 675da8e..51fd3d5 100644
---- a/test/simple/test-stream2-readable-legacy-drain.js
-+++ b/test/simple/test-stream2-readable-legacy-drain.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Stream = require('stream');
-+var Stream = require('../../');
- var Readable = Stream.Readable;
-
- var r = new Readable();
-diff --git a/test/simple/test-stream2-readable-non-empty-end.js b/test/simple/test-stream2-readable-non-empty-end.js
-index 7314ae7..c971898 100644
---- a/test/simple/test-stream2-readable-non-empty-end.js
-+++ b/test/simple/test-stream2-readable-non-empty-end.js
-@@ -21,7 +21,7 @@
-
- var assert = require('assert');
- var common = require('../common.js');
--var Readable = require('_stream_readable');
-+var Readable = require('../../lib/_stream_readable');
-
- var len = 0;
- var chunks = new Array(10);
-diff --git a/test/simple/test-stream2-readable-wrap-empty.js b/test/simple/test-stream2-readable-wrap-empty.js
-index 2e5cf25..fd8a3dc 100644
---- a/test/simple/test-stream2-readable-wrap-empty.js
-+++ b/test/simple/test-stream2-readable-wrap-empty.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('_stream_readable');
-+var Readable = require('../../lib/_stream_readable');
- var EE = require('events').EventEmitter;
-
- var oldStream = new EE();
-diff --git a/test/simple/test-stream2-readable-wrap.js b/test/simple/test-stream2-readable-wrap.js
-index 90eea01..6b177f7 100644
---- a/test/simple/test-stream2-readable-wrap.js
-+++ b/test/simple/test-stream2-readable-wrap.js
-@@ -22,8 +22,8 @@
- var common = require('../common');
- var assert = require('assert');
-
--var Readable = require('_stream_readable');
--var Writable = require('_stream_writable');
-+var Readable = require('../../lib/_stream_readable');
-+var Writable = require('../../lib/_stream_writable');
- var EE = require('events').EventEmitter;
-
- var testRuns = 0, completedRuns = 0;
-diff --git a/test/simple/test-stream2-set-encoding.js b/test/simple/test-stream2-set-encoding.js
-index 5d2c32a..685531b 100644
---- a/test/simple/test-stream2-set-encoding.js
-+++ b/test/simple/test-stream2-set-encoding.js
-@@ -22,7 +22,7 @@
-
- var common = require('../common.js');
- var assert = require('assert');
--var R = require('_stream_readable');
-+var R = require('../../lib/_stream_readable');
- var util = require('util');
-
- // tiny node-tap lookalike.
-diff --git a/test/simple/test-stream2-transform.js b/test/simple/test-stream2-transform.js
-index 9c9ddd8..a0cacc6 100644
---- a/test/simple/test-stream2-transform.js
-+++ b/test/simple/test-stream2-transform.js
-@@ -21,8 +21,8 @@
-
- var assert = require('assert');
- var common = require('../common.js');
--var PassThrough = require('_stream_passthrough');
--var Transform = require('_stream_transform');
-+var PassThrough = require('../../').PassThrough;
-+var Transform = require('../../').Transform;
-
- // tiny node-tap lookalike.
- var tests = [];
-diff --git a/test/simple/test-stream2-unpipe-drain.js b/test/simple/test-stream2-unpipe-drain.js
-index d66dc3c..365b327 100644
---- a/test/simple/test-stream2-unpipe-drain.js
-+++ b/test/simple/test-stream2-unpipe-drain.js
-@@ -22,7 +22,7 @@
-
- var common = require('../common.js');
- var assert = require('assert');
--var stream = require('stream');
-+var stream = require('../../');
- var crypto = require('crypto');
-
- var util = require('util');
-diff --git a/test/simple/test-stream2-unpipe-leak.js b/test/simple/test-stream2-unpipe-leak.js
-index 99f8746..17c92ae 100644
---- a/test/simple/test-stream2-unpipe-leak.js
-+++ b/test/simple/test-stream2-unpipe-leak.js
-@@ -22,7 +22,7 @@
-
- var common = require('../common.js');
- var assert = require('assert');
--var stream = require('stream');
-+var stream = require('../../');
-
- var chunk = new Buffer('hallo');
-
-diff --git a/test/simple/test-stream2-writable.js b/test/simple/test-stream2-writable.js
-index 704100c..209c3a6 100644
---- a/test/simple/test-stream2-writable.js
-+++ b/test/simple/test-stream2-writable.js
-@@ -20,8 +20,8 @@
- // USE OR OTHER DEALINGS IN THE SOFTWARE.
-
- var common = require('../common.js');
--var W = require('_stream_writable');
--var D = require('_stream_duplex');
-+var W = require('../../').Writable;
-+var D = require('../../').Duplex;
- var assert = require('assert');
-
- var util = require('util');
-diff --git a/test/simple/test-stream3-pause-then-read.js b/test/simple/test-stream3-pause-then-read.js
-index b91bde3..2f72c15 100644
---- a/test/simple/test-stream3-pause-then-read.js
-+++ b/test/simple/test-stream3-pause-then-read.js
-@@ -22,7 +22,7 @@
- var common = require('../common');
- var assert = require('assert');
-
--var stream = require('stream');
-+var stream = require('../../');
- var Readable = stream.Readable;
- var Writable = stream.Writable;
diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js
index b513d61a963a40..736693b8400fed 100644
--- a/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js
+++ b/deps/npm/node_modules/readable-stream/lib/_stream_duplex.js
@@ -1,39 +1,25 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
-module.exports = Duplex;
+'use strict';
/**/
+
var objectKeys = Object.keys || function (obj) {
var keys = [];
- for (var key in obj) keys.push(key);
- return keys;
-}
+ for (var key in obj) {
+ keys.push(key);
+ }return keys;
+};
/* */
+module.exports = Duplex;
+
+/**/
+var processNextTick = require('process-nextick-args');
+/* */
/**/
var util = require('core-util-is');
@@ -45,27 +31,24 @@ var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
-forEach(objectKeys(Writable.prototype), function(method) {
- if (!Duplex.prototype[method])
- Duplex.prototype[method] = Writable.prototype[method];
-});
+var keys = objectKeys(Writable.prototype);
+for (var v = 0; v < keys.length; v++) {
+ var method = keys[v];
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
+}
function Duplex(options) {
- if (!(this instanceof Duplex))
- return new Duplex(options);
+ if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
- if (options && options.readable === false)
- this.readable = false;
+ if (options && options.readable === false) this.readable = false;
- if (options && options.writable === false)
- this.writable = false;
+ if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
- if (options && options.allowHalfOpen === false)
- this.allowHalfOpen = false;
+ if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
@@ -74,16 +57,19 @@ function Duplex(options) {
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
- if (this.allowHalfOpen || this._writableState.ended)
- return;
+ if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
- process.nextTick(this.end.bind(this));
+ processNextTick(onEndNT, this);
}
-function forEach (xs, f) {
+function onEndNT(self) {
+ self.end();
+}
+
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js
index 895ca50a1d208a..d06f71f1868d77 100644
--- a/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js
+++ b/deps/npm/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -1,28 +1,9 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
+'use strict';
+
module.exports = PassThrough;
var Transform = require('./_stream_transform');
@@ -35,12 +16,11 @@ util.inherits = require('inherits');
util.inherits(PassThrough, Transform);
function PassThrough(options) {
- if (!(this instanceof PassThrough))
- return new PassThrough(options);
+ if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
-PassThrough.prototype._transform = function(chunk, encoding, cb) {
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
-};
+};
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js
index 19ab3588984252..54a9d5c553d69e 100644
--- a/deps/npm/node_modules/readable-stream/lib/_stream_readable.js
+++ b/deps/npm/node_modules/readable-stream/lib/_stream_readable.js
@@ -1,30 +1,14 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
+'use strict';
module.exports = Readable;
/**/
-var isArray = require('isarray');
+var processNextTick = require('process-nextick-args');
/* */
+/**/
+var isArray = require('isarray');
+/* */
/**/
var Buffer = require('buffer').Buffer;
@@ -32,49 +16,66 @@ var Buffer = require('buffer').Buffer;
Readable.ReadableState = ReadableState;
-var EE = require('events').EventEmitter;
+var EE = require('events');
/**/
-if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
+var EElistenerCount = function (emitter, type) {
return emitter.listeners(type).length;
};
/* */
-var Stream = require('stream');
+/**/
+var Stream;
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
+/* */
+
+var Buffer = require('buffer').Buffer;
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
/* */
-var StringDecoder;
-
-
/**/
-var debug = require('util');
-if (debug && debug.debuglog) {
- debug = debug.debuglog('stream');
+var debugUtil = require('util');
+var debug = undefined;
+if (debugUtil && debugUtil.debuglog) {
+ debug = debugUtil.debuglog('stream');
} else {
debug = function () {};
}
/* */
+var StringDecoder;
util.inherits(Readable, Stream);
+var Duplex;
function ReadableState(options, stream) {
- var Duplex = require('./_stream_duplex');
+ Duplex = Duplex || require('./_stream_duplex');
options = options || {};
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
+
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
+
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
- var defaultHwm = options.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.buffer = [];
this.length = 0;
@@ -96,14 +97,7 @@ function ReadableState(options, stream) {
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
-
-
- // object stream flag. Used to make read(n) ignore n and to
- // make all the buffer merging and length checks go away
- this.objectMode = !!options.objectMode;
-
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.readableObjectMode;
+ this.resumeScheduled = false;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
@@ -123,24 +117,25 @@ function ReadableState(options, stream) {
this.decoder = null;
this.encoding = null;
if (options.encoding) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
+var Duplex;
function Readable(options) {
- var Duplex = require('./_stream_duplex');
+ Duplex = Duplex || require('./_stream_duplex');
- if (!(this instanceof Readable))
- return new Readable(options);
+ if (!(this instanceof Readable)) return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
+ if (options && typeof options.read === 'function') this._read = options.read;
+
Stream.call(this);
}
@@ -148,10 +143,10 @@ function Readable(options) {
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
-Readable.prototype.push = function(chunk, encoding) {
+Readable.prototype.push = function (chunk, encoding) {
var state = this._readableState;
- if (util.isString(chunk) && !state.objectMode) {
+ if (!state.objectMode && typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
@@ -163,19 +158,22 @@ Readable.prototype.push = function(chunk, encoding) {
};
// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function(chunk) {
+Readable.prototype.unshift = function (chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
+Readable.prototype.isPaused = function () {
+ return this._readableState.flowing === false;
+};
+
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
- } else if (util.isNullOrUndefined(chunk)) {
+ } else if (chunk === null) {
state.reading = false;
- if (!state.ended)
- onEofChunk(stream, state);
+ onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
@@ -184,26 +182,28 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
- if (state.decoder && !addToFront && !encoding)
+ var skipAdd;
+ if (state.decoder && !addToFront && !encoding) {
chunk = state.decoder.write(chunk);
+ skipAdd = !state.objectMode && chunk.length === 0;
+ }
- if (!addToFront)
- state.reading = false;
-
- // if we want the data now, just emit it.
- if (state.flowing && state.length === 0 && !state.sync) {
- stream.emit('data', chunk);
- stream.read(0);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront)
- state.buffer.unshift(chunk);
- else
- state.buffer.push(chunk);
-
- if (state.needReadable)
- emitReadable(stream);
+ if (!addToFront) state.reading = false;
+
+ // Don't add to the buffer if we've decoded to an empty string chunk and
+ // we're not in object mode
+ if (!skipAdd) {
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+
+ if (state.needReadable) emitReadable(stream);
+ }
}
maybeReadMore(stream, state);
@@ -215,8 +215,6 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
return needMoreData(state);
}
-
-
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
@@ -225,92 +223,80 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
- return !state.ended &&
- (state.needReadable ||
- state.length < state.highWaterMark ||
- state.length === 0);
+ return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}
// backwards compatibility.
-Readable.prototype.setEncoding = function(enc) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+Readable.prototype.setEncoding = function (enc) {
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
return this;
};
-// Don't raise the hwm > 128MB
+// Don't raise the hwm > 8MB
var MAX_HWM = 0x800000;
-function roundUpToNextPowerOf2(n) {
+function computeNewHighWaterMark(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
// Get the next highest power of 2
n--;
- for (var p = 1; p < 32; p <<= 1) n |= n >> p;
+ n |= n >>> 1;
+ n |= n >>> 2;
+ n |= n >>> 4;
+ n |= n >>> 8;
+ n |= n >>> 16;
n++;
}
return n;
}
function howMuchToRead(n, state) {
- if (state.length === 0 && state.ended)
- return 0;
+ if (state.length === 0 && state.ended) return 0;
- if (state.objectMode)
- return n === 0 ? 0 : 1;
+ if (state.objectMode) return n === 0 ? 0 : 1;
- if (isNaN(n) || util.isNull(n)) {
+ if (n === null || isNaN(n)) {
// only flow one buffer at a time
- if (state.flowing && state.buffer.length)
- return state.buffer[0].length;
- else
- return state.length;
+ if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length;
}
- if (n <= 0)
- return 0;
+ if (n <= 0) return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
- if (n > state.highWaterMark)
- state.highWaterMark = roundUpToNextPowerOf2(n);
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
if (!state.ended) {
state.needReadable = true;
return 0;
- } else
+ } else {
return state.length;
+ }
}
return n;
}
// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function(n) {
+Readable.prototype.read = function (n) {
debug('read', n);
var state = this._readableState;
var nOrig = n;
- if (!util.isNumber(n) || n > 0)
- state.emittedReadable = false;
+ if (typeof n !== 'number' || n > 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
- if (n === 0 &&
- state.needReadable &&
- (state.length >= state.highWaterMark || state.ended)) {
+ if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended)
- endReadable(this);
- else
- emitReadable(this);
+ if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
return null;
}
@@ -318,8 +304,7 @@ Readable.prototype.read = function(n) {
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
- if (state.length === 0)
- endReadable(this);
+ if (state.length === 0) endReadable(this);
return null;
}
@@ -367,8 +352,7 @@ Readable.prototype.read = function(n) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
- if (state.length === 0)
- state.needReadable = true;
+ if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
@@ -376,16 +360,12 @@ Readable.prototype.read = function(n) {
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
- if (doRead && !state.reading)
- n = howMuchToRead(nOrig, state);
+ if (doRead && !state.reading) n = howMuchToRead(nOrig, state);
var ret;
- if (n > 0)
- ret = fromList(n, state);
- else
- ret = null;
+ if (n > 0) ret = fromList(n, state);else ret = null;
- if (util.isNull(ret)) {
+ if (ret === null) {
state.needReadable = true;
n = 0;
}
@@ -394,33 +374,27 @@ Readable.prototype.read = function(n) {
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
- if (state.length === 0 && !state.ended)
- state.needReadable = true;
+ if (state.length === 0 && !state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
- if (nOrig !== n && state.ended && state.length === 0)
- endReadable(this);
+ if (nOrig !== n && state.ended && state.length === 0) endReadable(this);
- if (!util.isNull(ret))
- this.emit('data', ret);
+ if (ret !== null) this.emit('data', ret);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
- if (!util.isBuffer(chunk) &&
- !util.isString(chunk) &&
- !util.isNullOrUndefined(chunk) &&
- !state.objectMode) {
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
-
function onEofChunk(stream, state) {
- if (state.decoder && !state.ended) {
+ if (state.ended) return;
+ if (state.decoder) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
@@ -442,12 +416,7 @@ function emitReadable(stream) {
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
- if (state.sync)
- process.nextTick(function() {
- emitReadable_(stream);
- });
- else
- emitReadable_(stream);
+ if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream);
}
}
@@ -457,7 +426,6 @@ function emitReadable_(stream) {
flow(stream);
}
-
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
@@ -467,23 +435,18 @@ function emitReadable_(stream) {
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
- process.nextTick(function() {
- maybeReadMore_(stream, state);
- });
+ processNextTick(maybeReadMore_, stream, state);
}
}
function maybeReadMore_(stream, state) {
var len = state.length;
- while (!state.reading && !state.flowing && !state.ended &&
- state.length < state.highWaterMark) {
+ while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
- break;
- else
- len = state.length;
+ break;else len = state.length;
}
state.readingMore = false;
}
@@ -492,11 +455,11 @@ function maybeReadMore_(stream, state) {
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function(n) {
+Readable.prototype._read = function (n) {
this.emit('error', new Error('not implemented'));
};
-Readable.prototype.pipe = function(dest, pipeOpts) {
+Readable.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
@@ -514,15 +477,10 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
state.pipesCount += 1;
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
- var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
- dest !== process.stdout &&
- dest !== process.stderr;
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
- if (state.endEmitted)
- process.nextTick(endFn);
- else
- src.once('end', endFn);
+ if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
@@ -544,6 +502,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
+ var cleanedUp = false;
function cleanup() {
debug('cleanup');
// cleanup event handlers once the pipe is broken
@@ -556,14 +515,14 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
src.removeListener('end', cleanup);
src.removeListener('data', ondata);
+ cleanedUp = true;
+
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
- if (state.awaitDrain &&
- (!dest._writableState || dest._writableState.needDrain))
- ondrain();
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
src.on('data', ondata);
@@ -571,9 +530,13 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
debug('ondata');
var ret = dest.write(chunk);
if (false === ret) {
- debug('false write response, pause',
- src._readableState.awaitDrain);
- src._readableState.awaitDrain++;
+ // If the user unpiped during `dest.write()`, it is possible
+ // to get stuck in a permanently paused state if that write
+ // also returned false.
+ if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) {
+ debug('false write response, pause', src._readableState.awaitDrain);
+ src._readableState.awaitDrain++;
+ }
src.pause();
}
}
@@ -584,19 +547,11 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
- if (EE.listenerCount(dest, 'error') === 0)
- dest.emit('error', er);
+ if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
- if (!dest._events || !dest._events.error)
- dest.on('error', onerror);
- else if (isArray(dest._events.error))
- dest._events.error.unshift(onerror);
- else
- dest._events.error = [onerror, dest._events.error];
-
-
+ if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
@@ -629,41 +584,35 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
};
function pipeOnDrain(src) {
- return function() {
+ return function () {
var state = src._readableState;
debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain)
- state.awaitDrain--;
- if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
+ if (state.awaitDrain) state.awaitDrain--;
+ if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
state.flowing = true;
flow(src);
}
};
}
-
-Readable.prototype.unpipe = function(dest) {
+Readable.prototype.unpipe = function (dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
- if (state.pipesCount === 0)
- return this;
+ if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
- if (dest && dest !== state.pipes)
- return this;
+ if (dest && dest !== state.pipes) return this;
- if (!dest)
- dest = state.pipes;
+ if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
- if (dest)
- dest.emit('unpipe', this);
+ if (dest) dest.emit('unpipe', this);
return this;
}
@@ -677,20 +626,18 @@ Readable.prototype.unpipe = function(dest) {
state.pipesCount = 0;
state.flowing = false;
- for (var i = 0; i < len; i++)
- dests[i].emit('unpipe', this);
- return this;
+ for (var _i = 0; _i < len; _i++) {
+ dests[_i].emit('unpipe', this);
+ }return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
- if (i === -1)
- return this;
+ if (i === -1) return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
- if (state.pipesCount === 1)
- state.pipes = state.pipes[0];
+ if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this);
@@ -699,7 +646,7 @@ Readable.prototype.unpipe = function(dest) {
// set up data events if they are asked for
// Ensure readable listeners eventually get something
-Readable.prototype.on = function(ev, fn) {
+Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
// If listening to data, and it has not explicitly been paused,
@@ -708,18 +655,14 @@ Readable.prototype.on = function(ev, fn) {
this.resume();
}
- if (ev === 'readable' && this.readable) {
+ if (ev === 'readable' && !this._readableState.endEmitted) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
- var self = this;
- process.nextTick(function() {
- debug('readable nexttick read 0');
- self.read(0);
- });
+ processNextTick(nReadingNextTick, this);
} else if (state.length) {
emitReadable(this, state);
}
@@ -730,17 +673,18 @@ Readable.prototype.on = function(ev, fn) {
};
Readable.prototype.addListener = Readable.prototype.on;
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
+
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function() {
+Readable.prototype.resume = function () {
var state = this._readableState;
if (!state.flowing) {
debug('resume');
state.flowing = true;
- if (!state.reading) {
- debug('resume read 0');
- this.read(0);
- }
resume(this, state);
}
return this;
@@ -749,21 +693,23 @@ Readable.prototype.resume = function() {
function resume(stream, state) {
if (!state.resumeScheduled) {
state.resumeScheduled = true;
- process.nextTick(function() {
- resume_(stream, state);
- });
+ processNextTick(resume_, stream, state);
}
}
function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
+
state.resumeScheduled = false;
stream.emit('resume');
flow(stream);
- if (state.flowing && !state.reading)
- stream.read(0);
+ if (state.flowing && !state.reading) stream.read(0);
}
-Readable.prototype.pause = function() {
+Readable.prototype.pause = function () {
debug('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug('pause');
@@ -786,28 +732,27 @@ function flow(stream) {
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function(stream) {
+Readable.prototype.wrap = function (stream) {
var state = this._readableState;
var paused = false;
var self = this;
- stream.on('end', function() {
+ stream.on('end', function () {
debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
- if (chunk && chunk.length)
- self.push(chunk);
+ if (chunk && chunk.length) self.push(chunk);
}
self.push(null);
});
- stream.on('data', function(chunk) {
+ stream.on('data', function (chunk) {
debug('wrapped data');
- if (state.decoder)
- chunk = state.decoder.write(chunk);
- if (!chunk || !state.objectMode && !chunk.length)
- return;
+ if (state.decoder) chunk = state.decoder.write(chunk);
+
+ // don't skip over falsy values in objectMode
+ if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = self.push(chunk);
if (!ret) {
@@ -819,22 +764,24 @@ Readable.prototype.wrap = function(stream) {
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
- if (util.isFunction(stream[i]) && util.isUndefined(this[i])) {
- this[i] = function(method) { return function() {
- return stream[method].apply(stream, arguments);
- }}(i);
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = function (method) {
+ return function () {
+ return stream[method].apply(stream, arguments);
+ };
+ }(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
- forEach(events, function(ev) {
+ forEach(events, function (ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
- self._read = function(n) {
+ self._read = function (n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
@@ -845,8 +792,6 @@ Readable.prototype.wrap = function(stream) {
return self;
};
-
-
// exposed for testing purposes only.
Readable._fromList = fromList;
@@ -860,19 +805,11 @@ function fromList(n, state) {
var ret;
// nothing in the list, definitely empty.
- if (list.length === 0)
- return null;
+ if (list.length === 0) return null;
- if (length === 0)
- ret = null;
- else if (objectMode)
- ret = list.shift();
- else if (!n || n >= length) {
+ if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) {
// read it all, truncate the array.
- if (stringMode)
- ret = list.join('');
- else
- ret = Buffer.concat(list, length);
+ if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
@@ -888,25 +825,16 @@ function fromList(n, state) {
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
- if (stringMode)
- ret = '';
- else
- ret = new Buffer(n);
+ if (stringMode) ret = '';else ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
- if (stringMode)
- ret += buf.slice(0, cpy);
- else
- buf.copy(ret, c, 0, cpy);
+ if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy);
- if (cpy < buf.length)
- list[0] = buf.slice(cpy);
- else
- list.shift();
+ if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift();
c += cpy;
}
@@ -921,31 +849,32 @@ function endReadable(stream) {
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
- if (state.length > 0)
- throw new Error('endReadable called on non-empty stream');
+ if (state.length > 0) throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
- process.nextTick(function() {
- // Check that we didn't get one last unshift.
- if (!state.endEmitted && state.length === 0) {
- state.endEmitted = true;
- stream.readable = false;
- stream.emit('end');
- }
- });
+ processNextTick(endReadableNT, state, stream);
}
}
-function forEach (xs, f) {
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
+ }
+}
+
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
-function indexOf (xs, x) {
+function indexOf(xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
-}
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js
index 905c5e450758b3..625cdc17698059 100644
--- a/deps/npm/node_modules/readable-stream/lib/_stream_transform.js
+++ b/deps/npm/node_modules/readable-stream/lib/_stream_transform.js
@@ -1,25 +1,3 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
@@ -62,6 +40,8 @@
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
+'use strict';
+
module.exports = Transform;
var Duplex = require('./_stream_duplex');
@@ -73,9 +53,8 @@ util.inherits = require('inherits');
util.inherits(Transform, Duplex);
-
-function TransformState(options, stream) {
- this.afterTransform = function(er, data) {
+function TransformState(stream) {
+ this.afterTransform = function (er, data) {
return afterTransform(stream, er, data);
};
@@ -83,6 +62,7 @@ function TransformState(options, stream) {
this.transforming = false;
this.writecb = null;
this.writechunk = null;
+ this.writeencoding = null;
}
function afterTransform(stream, er, data) {
@@ -91,17 +71,14 @@ function afterTransform(stream, er, data) {
var cb = ts.writecb;
- if (!cb)
- return stream.emit('error', new Error('no writecb in Transform class'));
+ if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
- if (!util.isNullOrUndefined(data))
- stream.push(data);
+ if (data !== null && data !== undefined) stream.push(data);
- if (cb)
- cb(er);
+ cb(er);
var rs = stream._readableState;
rs.reading = false;
@@ -110,14 +87,12 @@ function afterTransform(stream, er, data) {
}
}
-
function Transform(options) {
- if (!(this instanceof Transform))
- return new Transform(options);
+ if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
- this._transformState = new TransformState(options, this);
+ this._transformState = new TransformState(this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
@@ -130,17 +105,20 @@ function Transform(options) {
// sync guard flag.
this._readableState.sync = false;
- this.once('prefinish', function() {
- if (util.isFunction(this._flush))
- this._flush(function(er) {
- done(stream, er);
- });
- else
- done(stream);
+ if (options) {
+ if (typeof options.transform === 'function') this._transform = options.transform;
+
+ if (typeof options.flush === 'function') this._flush = options.flush;
+ }
+
+ this.once('prefinish', function () {
+ if (typeof this._flush === 'function') this._flush(function (er) {
+ done(stream, er);
+ });else done(stream);
});
}
-Transform.prototype.push = function(chunk, encoding) {
+Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
@@ -155,31 +133,28 @@ Transform.prototype.push = function(chunk, encoding) {
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
-Transform.prototype._transform = function(chunk, encoding, cb) {
+Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('not implemented');
};
-Transform.prototype._write = function(chunk, encoding, cb) {
+Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
- if (ts.needTransform ||
- rs.needReadable ||
- rs.length < rs.highWaterMark)
- this._read(rs.highWaterMark);
+ if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
-Transform.prototype._read = function(n) {
+Transform.prototype._read = function (n) {
var ts = this._transformState;
- if (!util.isNull(ts.writechunk) && ts.writecb && !ts.transforming) {
+ if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
@@ -189,21 +164,17 @@ Transform.prototype._read = function(n) {
}
};
-
function done(stream, er) {
- if (er)
- return stream.emit('error', er);
+ if (er) return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var ts = stream._transformState;
- if (ws.length)
- throw new Error('calling transform done when ws.length != 0');
+ if (ws.length) throw new Error('calling transform done when ws.length != 0');
- if (ts.transforming)
- throw new Error('calling transform done when still transforming');
+ if (ts.transforming) throw new Error('calling transform done when still transforming');
return stream.push(null);
-}
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js
index db8539cd5b818d..95916c992a9507 100644
--- a/deps/npm/node_modules/readable-stream/lib/_stream_writable.js
+++ b/deps/npm/node_modules/readable-stream/lib/_stream_writable.js
@@ -1,73 +1,81 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
// A bit simpler than readable streams.
-// Implement an async ._write(chunk, cb), and it'll handle all
+// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
+'use strict';
+
module.exports = Writable;
+/**/
+var processNextTick = require('process-nextick-args');
+/* */
+
+/**/
+var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick;
+/* */
+
/**/
var Buffer = require('buffer').Buffer;
/* */
Writable.WritableState = WritableState;
-
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
/* */
-var Stream = require('stream');
+/**/
+var internalUtil = {
+ deprecate: require('util-deprecate')
+};
+/* */
+
+/**/
+var Stream;
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
+/* */
+
+var Buffer = require('buffer').Buffer;
util.inherits(Writable, Stream);
+function nop() {}
+
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
+ this.next = null;
}
+var Duplex;
function WritableState(options, stream) {
- var Duplex = require('./_stream_duplex');
+ Duplex = Duplex || require('./_stream_duplex');
options = options || {};
- // the point at which write() starts returning false
- // Note: 0 is a valid value, means that we always return false if
- // the entire buffer is not flushed immediately on write()
- var hwm = options.highWaterMark;
- var defaultHwm = options.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
-
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.writableObjectMode;
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
+
+ // the point at which write() starts returning false
+ // Note: 0 is a valid value, means that we always return false if
+ // the entire buffer is not flushed immediately on write()
+ var hwm = options.highWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
@@ -111,7 +119,7 @@ function WritableState(options, stream) {
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
- this.onwrite = function(er) {
+ this.onwrite = function (er) {
onwrite(stream, er);
};
@@ -121,7 +129,8 @@ function WritableState(options, stream) {
// the amount that is being written when _write is called.
this.writelen = 0;
- this.buffer = [];
+ this.bufferedRequest = null;
+ this.lastBufferedRequest = null;
// number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
@@ -133,37 +142,68 @@ function WritableState(options, stream) {
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
+
+ // count buffered requests
+ this.bufferedRequestCount = 0;
+
+ // create the two objects needed to store the corked requests
+ // they are not a linked list, as no new elements are inserted in there
+ this.corkedRequestsFree = new CorkedRequest(this);
+ this.corkedRequestsFree.next = new CorkedRequest(this);
}
+WritableState.prototype.getBuffer = function writableStateGetBuffer() {
+ var current = this.bufferedRequest;
+ var out = [];
+ while (current) {
+ out.push(current);
+ current = current.next;
+ }
+ return out;
+};
+
+(function () {
+ try {
+ Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: internalUtil.deprecate(function () {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.')
+ });
+ } catch (_) {}
+})();
+
+var Duplex;
function Writable(options) {
- var Duplex = require('./_stream_duplex');
+ Duplex = Duplex || require('./_stream_duplex');
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable, they're instanceof Readable.
- if (!(this instanceof Writable) && !(this instanceof Duplex))
- return new Writable(options);
+ if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options);
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
+ if (options) {
+ if (typeof options.write === 'function') this._write = options.write;
+
+ if (typeof options.writev === 'function') this._writev = options.writev;
+ }
+
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
-Writable.prototype.pipe = function() {
+Writable.prototype.pipe = function () {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
-
-function writeAfterEnd(stream, state, cb) {
+function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
- process.nextTick(function() {
- cb(er);
- });
+ processNextTick(cb, er);
}
// If we get something that is not a buffer, string, null, or undefined,
@@ -173,40 +213,30 @@ function writeAfterEnd(stream, state, cb) {
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
var valid = true;
- if (!util.isBuffer(chunk) &&
- !util.isString(chunk) &&
- !util.isNullOrUndefined(chunk) &&
- !state.objectMode) {
+
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
- process.nextTick(function() {
- cb(er);
- });
+ processNextTick(cb, er);
valid = false;
}
return valid;
}
-Writable.prototype.write = function(chunk, encoding, cb) {
+Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
- if (util.isFunction(encoding)) {
+ if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
- if (util.isBuffer(chunk))
- encoding = 'buffer';
- else if (!encoding)
- encoding = state.defaultEncoding;
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
- if (!util.isFunction(cb))
- cb = function() {};
+ if (typeof cb !== 'function') cb = nop;
- if (state.ended)
- writeAfterEnd(this, state, cb);
- else if (validChunk(this, state, chunk, cb)) {
+ if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, chunk, encoding, cb);
}
@@ -214,31 +244,31 @@ Writable.prototype.write = function(chunk, encoding, cb) {
return ret;
};
-Writable.prototype.cork = function() {
+Writable.prototype.cork = function () {
var state = this._writableState;
state.corked++;
};
-Writable.prototype.uncork = function() {
+Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
- if (!state.writing &&
- !state.corked &&
- !state.finished &&
- !state.bufferProcessing &&
- state.buffer.length)
- clearBuffer(this, state);
+ if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
+Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
+ // node::ParseEncoding() requires lower case.
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
+ this._writableState.defaultEncoding = encoding;
+};
+
function decodeChunk(state, chunk, encoding) {
- if (!state.objectMode &&
- state.decodeStrings !== false &&
- util.isString(chunk)) {
+ if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
@@ -249,21 +279,28 @@ function decodeChunk(state, chunk, encoding) {
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
- if (util.isBuffer(chunk))
- encoding = 'buffer';
+
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
- if (!ret)
- state.needDrain = true;
+ if (!ret) state.needDrain = true;
- if (state.writing || state.corked)
- state.buffer.push(new WriteReq(chunk, encoding, cb));
- else
+ if (state.writing || state.corked) {
+ var last = state.lastBufferedRequest;
+ state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
+ if (last) {
+ last.next = state.lastBufferedRequest;
+ } else {
+ state.bufferedRequest = state.lastBufferedRequest;
+ }
+ state.bufferedRequestCount += 1;
+ } else {
doWrite(stream, state, false, len, chunk, encoding, cb);
+ }
return ret;
}
@@ -273,23 +310,13 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writecb = cb;
state.writing = true;
state.sync = true;
- if (writev)
- stream._writev(chunk, state.onwrite);
- else
- stream._write(chunk, encoding, state.onwrite);
+ if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
- if (sync)
- process.nextTick(function() {
- state.pendingcb--;
- cb(er);
- });
- else {
- state.pendingcb--;
- cb(er);
- }
+ --state.pendingcb;
+ if (sync) processNextTick(cb, er);else cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
@@ -309,32 +336,26 @@ function onwrite(stream, er) {
onwriteStateUpdate(state);
- if (er)
- onwriteError(stream, state, sync, er, cb);
- else {
+ if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
- var finished = needFinish(stream, state);
+ var finished = needFinish(state);
- if (!finished &&
- !state.corked &&
- !state.bufferProcessing &&
- state.buffer.length) {
+ if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
- process.nextTick(function() {
- afterWrite(stream, state, finished, cb);
- });
+ /**/
+ asyncWrite(afterWrite, stream, state, finished, cb);
+ /* */
} else {
- afterWrite(stream, state, finished, cb);
- }
+ afterWrite(stream, state, finished, cb);
+ }
}
}
function afterWrite(stream, state, finished, cb) {
- if (!finished)
- onwriteDrain(stream, state);
+ if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
@@ -350,80 +371,79 @@ function onwriteDrain(stream, state) {
}
}
-
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
+ var entry = state.bufferedRequest;
- if (stream._writev && state.buffer.length > 1) {
+ if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
- var cbs = [];
- for (var c = 0; c < state.buffer.length; c++)
- cbs.push(state.buffer[c].callback);
+ var l = state.bufferedRequestCount;
+ var buffer = new Array(l);
+ var holder = state.corkedRequestsFree;
+ holder.entry = entry;
+
+ var count = 0;
+ while (entry) {
+ buffer[count] = entry;
+ entry = entry.next;
+ count += 1;
+ }
- // count the one we are adding, as well.
- // TODO(isaacs) clean this up
- state.pendingcb++;
- doWrite(stream, state, true, state.length, state.buffer, '', function(err) {
- for (var i = 0; i < cbs.length; i++) {
- state.pendingcb--;
- cbs[i](err);
- }
- });
+ doWrite(stream, state, true, state.length, buffer, '', holder.finish);
- // Clear buffer
- state.buffer = [];
+ // doWrite is always async, defer these to save a bit of time
+ // as the hot path ends with doWrite
+ state.pendingcb++;
+ state.lastBufferedRequest = null;
+ state.corkedRequestsFree = holder.next;
+ holder.next = null;
} else {
// Slow case, write chunks one-by-one
- for (var c = 0; c < state.buffer.length; c++) {
- var entry = state.buffer[c];
+ while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
-
+ entry = entry.next;
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
- c++;
break;
}
}
- if (c < state.buffer.length)
- state.buffer = state.buffer.slice(c);
- else
- state.buffer.length = 0;
+ if (entry === null) state.lastBufferedRequest = null;
}
+ state.bufferedRequestCount = 0;
+ state.bufferedRequest = entry;
state.bufferProcessing = false;
}
-Writable.prototype._write = function(chunk, encoding, cb) {
+Writable.prototype._write = function (chunk, encoding, cb) {
cb(new Error('not implemented'));
-
};
Writable.prototype._writev = null;
-Writable.prototype.end = function(chunk, encoding, cb) {
+Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
- if (util.isFunction(chunk)) {
+ if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
- } else if (util.isFunction(encoding)) {
+ } else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
- if (!util.isNullOrUndefined(chunk))
- this.write(chunk, encoding);
+ if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
@@ -432,16 +452,11 @@ Writable.prototype.end = function(chunk, encoding, cb) {
}
// ignore unnecessary end() calls.
- if (!state.ending && !state.finished)
- endWritable(this, state, cb);
+ if (!state.ending && !state.finished) endWritable(this, state, cb);
};
-
-function needFinish(stream, state) {
- return (state.ending &&
- state.length === 0 &&
- !state.finished &&
- !state.writing);
+function needFinish(state) {
+ return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function prefinish(stream, state) {
@@ -452,14 +467,15 @@ function prefinish(stream, state) {
}
function finishMaybe(stream, state) {
- var need = needFinish(stream, state);
+ var need = needFinish(state);
if (need) {
if (state.pendingcb === 0) {
prefinish(stream, state);
state.finished = true;
stream.emit('finish');
- } else
+ } else {
prefinish(stream, state);
+ }
}
return need;
}
@@ -468,10 +484,33 @@ function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
- if (state.finished)
- process.nextTick(cb);
- else
- stream.once('finish', cb);
+ if (state.finished) processNextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
+ stream.writable = false;
}
+
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
+function CorkedRequest(state) {
+ var _this = this;
+
+ this.next = null;
+ this.entry = null;
+
+ this.finish = function (err) {
+ var entry = _this.entry;
+ _this.entry = null;
+ while (entry) {
+ var cb = entry.callback;
+ state.pendingcb--;
+ cb(err);
+ entry = entry.next;
+ }
+ if (state.corkedRequestsFree) {
+ state.corkedRequestsFree.next = _this;
+ } else {
+ state.corkedRequestsFree = _this;
+ }
+ };
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/LICENSE b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/LICENSE
new file mode 100644
index 00000000000000..d8d7f9437dbf5a
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/LICENSE
@@ -0,0 +1,19 @@
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/lib/util.js b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
index 9074e8ebcb61e9..ff4c851c075a2f 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
+++ b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
@@ -21,8 +21,12 @@
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
-function isArray(ar) {
- return Array.isArray(ar);
+
+function isArray(arg) {
+ if (Array.isArray) {
+ return Array.isArray(arg);
+ }
+ return objectToString(arg) === '[object Array]';
}
exports.isArray = isArray;
@@ -62,7 +66,7 @@ function isUndefined(arg) {
exports.isUndefined = isUndefined;
function isRegExp(re) {
- return isObject(re) && objectToString(re) === '[object RegExp]';
+ return objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;
@@ -72,13 +76,12 @@ function isObject(arg) {
exports.isObject = isObject;
function isDate(d) {
- return isObject(d) && objectToString(d) === '[object Date]';
+ return objectToString(d) === '[object Date]';
}
exports.isDate = isDate;
function isError(e) {
- return isObject(e) &&
- (objectToString(e) === '[object Error]' || e instanceof Error);
+ return (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;
@@ -97,11 +100,8 @@ function isPrimitive(arg) {
}
exports.isPrimitive = isPrimitive;
-function isBuffer(arg) {
- return Buffer.isBuffer(arg);
-}
-exports.isBuffer = isBuffer;
+exports.isBuffer = Buffer.isBuffer;
function objectToString(o) {
return Object.prototype.toString.call(o);
-}
\ No newline at end of file
+}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/package.json b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/package.json
index b67333380c265e..ddd227e64f99f4 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/package.json
+++ b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/package.json
@@ -1,6 +1,6 @@
{
"name": "core-util-is",
- "version": "1.0.1",
+ "version": "1.0.2",
"description": "The `util.is*` functions introduced in Node v0.12.",
"main": "lib/util.js",
"repository": {
@@ -27,11 +27,34 @@
"bugs": {
"url": "https://github.com/isaacs/core-util-is/issues"
},
- "readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n",
- "readmeFilename": "README.md",
+ "scripts": {
+ "test": "tap test.js"
+ },
+ "devDependencies": {
+ "tap": "^2.3.0"
+ },
+ "gitHead": "a177da234df5638b363ddc15fa324619a38577c8",
"homepage": "https://github.com/isaacs/core-util-is#readme",
- "_id": "core-util-is@1.0.1",
- "_shasum": "6b07085aef9a3ccac6ee53bf9d3df0c1521a5538",
- "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.1.tgz",
- "_from": "core-util-is@>=1.0.0 <1.1.0"
+ "_id": "core-util-is@1.0.2",
+ "_shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7",
+ "_from": "core-util-is@>=1.0.0 <1.1.0",
+ "_npmVersion": "3.3.2",
+ "_nodeVersion": "4.0.0",
+ "_npmUser": {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ },
+ "dist": {
+ "shasum": "b5fd54220aa2bc5ab57aab7140c940754503c1a7",
+ "tarball": "http://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "isaacs",
+ "email": "i@izs.me"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/test.js b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/test.js
new file mode 100644
index 00000000000000..1a490c65ac8b5d
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/test.js
@@ -0,0 +1,68 @@
+var assert = require('tap');
+
+var t = require('./lib/util');
+
+assert.equal(t.isArray([]), true);
+assert.equal(t.isArray({}), false);
+
+assert.equal(t.isBoolean(null), false);
+assert.equal(t.isBoolean(true), true);
+assert.equal(t.isBoolean(false), true);
+
+assert.equal(t.isNull(null), true);
+assert.equal(t.isNull(undefined), false);
+assert.equal(t.isNull(false), false);
+assert.equal(t.isNull(), false);
+
+assert.equal(t.isNullOrUndefined(null), true);
+assert.equal(t.isNullOrUndefined(undefined), true);
+assert.equal(t.isNullOrUndefined(false), false);
+assert.equal(t.isNullOrUndefined(), true);
+
+assert.equal(t.isNumber(null), false);
+assert.equal(t.isNumber('1'), false);
+assert.equal(t.isNumber(1), true);
+
+assert.equal(t.isString(null), false);
+assert.equal(t.isString('1'), true);
+assert.equal(t.isString(1), false);
+
+assert.equal(t.isSymbol(null), false);
+assert.equal(t.isSymbol('1'), false);
+assert.equal(t.isSymbol(1), false);
+assert.equal(t.isSymbol(Symbol()), true);
+
+assert.equal(t.isUndefined(null), false);
+assert.equal(t.isUndefined(undefined), true);
+assert.equal(t.isUndefined(false), false);
+assert.equal(t.isUndefined(), true);
+
+assert.equal(t.isRegExp(null), false);
+assert.equal(t.isRegExp('1'), false);
+assert.equal(t.isRegExp(new RegExp()), true);
+
+assert.equal(t.isObject({}), true);
+assert.equal(t.isObject([]), true);
+assert.equal(t.isObject(new RegExp()), true);
+assert.equal(t.isObject(new Date()), true);
+
+assert.equal(t.isDate(null), false);
+assert.equal(t.isDate('1'), false);
+assert.equal(t.isDate(new Date()), true);
+
+assert.equal(t.isError(null), false);
+assert.equal(t.isError({ err: true }), false);
+assert.equal(t.isError(new Error()), true);
+
+assert.equal(t.isFunction(null), false);
+assert.equal(t.isFunction({ }), false);
+assert.equal(t.isFunction(function() {}), true);
+
+assert.equal(t.isPrimitive(null), true);
+assert.equal(t.isPrimitive(''), true);
+assert.equal(t.isPrimitive(0), true);
+assert.equal(t.isPrimitive(new Date()), false);
+
+assert.equal(t.isBuffer(null), false);
+assert.equal(t.isBuffer({}), false);
+assert.equal(t.isBuffer(new Buffer(0)), true);
diff --git a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/util.js b/deps/npm/node_modules/readable-stream/node_modules/core-util-is/util.js
deleted file mode 100644
index 007fa10575636d..00000000000000
--- a/deps/npm/node_modules/readable-stream/node_modules/core-util-is/util.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// NOTE: These type checking functions intentionally don't use `instanceof`
-// because it is fragile and can be easily faked with `Object.create()`.
-function isArray(ar) {
- return Array.isArray(ar);
-}
-exports.isArray = isArray;
-
-function isBoolean(arg) {
- return typeof arg === 'boolean';
-}
-exports.isBoolean = isBoolean;
-
-function isNull(arg) {
- return arg === null;
-}
-exports.isNull = isNull;
-
-function isNullOrUndefined(arg) {
- return arg == null;
-}
-exports.isNullOrUndefined = isNullOrUndefined;
-
-function isNumber(arg) {
- return typeof arg === 'number';
-}
-exports.isNumber = isNumber;
-
-function isString(arg) {
- return typeof arg === 'string';
-}
-exports.isString = isString;
-
-function isSymbol(arg) {
- return typeof arg === 'symbol';
-}
-exports.isSymbol = isSymbol;
-
-function isUndefined(arg) {
- return arg === void 0;
-}
-exports.isUndefined = isUndefined;
-
-function isRegExp(re) {
- return isObject(re) && objectToString(re) === '[object RegExp]';
-}
-exports.isRegExp = isRegExp;
-
-function isObject(arg) {
- return typeof arg === 'object' && arg !== null;
-}
-exports.isObject = isObject;
-
-function isDate(d) {
- return isObject(d) && objectToString(d) === '[object Date]';
-}
-exports.isDate = isDate;
-
-function isError(e) {
- return isObject(e) && objectToString(e) === '[object Error]';
-}
-exports.isError = isError;
-
-function isFunction(arg) {
- return typeof arg === 'function';
-}
-exports.isFunction = isFunction;
-
-function isPrimitive(arg) {
- return arg === null ||
- typeof arg === 'boolean' ||
- typeof arg === 'number' ||
- typeof arg === 'string' ||
- typeof arg === 'symbol' || // ES6 symbol
- typeof arg === 'undefined';
-}
-exports.isPrimitive = isPrimitive;
-
-function isBuffer(arg) {
- return arg instanceof Buffer;
-}
-exports.isBuffer = isBuffer;
-
-function objectToString(o) {
- return Object.prototype.toString.call(o);
-}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/readable-stream/node_modules/isarray/.npmignore
new file mode 100644
index 00000000000000..3c3629e647f5dd
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/.npmignore
@@ -0,0 +1 @@
+node_modules
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/readable-stream/node_modules/isarray/.travis.yml
new file mode 100644
index 00000000000000..cc4dba29d959a2
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - "0.8"
+ - "0.10"
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/readable-stream/node_modules/isarray/Makefile
new file mode 100644
index 00000000000000..0ecc29c402c243
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/Makefile
@@ -0,0 +1,5 @@
+
+test:
+ @node_modules/.bin/tape test.js
+
+.PHONY: test
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/readable-stream/node_modules/isarray/README.md
index 052a62b8d7b7ae..16d2c59c6195f9 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/isarray/README.md
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/README.md
@@ -3,6 +3,12 @@
`Array#isArray` for older browsers.
+[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray)
+[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray)
+
+[![browser support](https://ci.testling.com/juliangruber/isarray.png)
+](https://ci.testling.com/juliangruber/isarray)
+
## Usage
```js
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/build/build.js b/deps/npm/node_modules/readable-stream/node_modules/isarray/build/build.js
deleted file mode 100644
index ec58596aeebe4e..00000000000000
--- a/deps/npm/node_modules/readable-stream/node_modules/isarray/build/build.js
+++ /dev/null
@@ -1,209 +0,0 @@
-
-/**
- * Require the given path.
- *
- * @param {String} path
- * @return {Object} exports
- * @api public
- */
-
-function require(path, parent, orig) {
- var resolved = require.resolve(path);
-
- // lookup failed
- if (null == resolved) {
- orig = orig || path;
- parent = parent || 'root';
- var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
- err.path = orig;
- err.parent = parent;
- err.require = true;
- throw err;
- }
-
- var module = require.modules[resolved];
-
- // perform real require()
- // by invoking the module's
- // registered function
- if (!module.exports) {
- module.exports = {};
- module.client = module.component = true;
- module.call(this, module.exports, require.relative(resolved), module);
- }
-
- return module.exports;
-}
-
-/**
- * Registered modules.
- */
-
-require.modules = {};
-
-/**
- * Registered aliases.
- */
-
-require.aliases = {};
-
-/**
- * Resolve `path`.
- *
- * Lookup:
- *
- * - PATH/index.js
- * - PATH.js
- * - PATH
- *
- * @param {String} path
- * @return {String} path or null
- * @api private
- */
-
-require.resolve = function(path) {
- if (path.charAt(0) === '/') path = path.slice(1);
- var index = path + '/index.js';
-
- var paths = [
- path,
- path + '.js',
- path + '.json',
- path + '/index.js',
- path + '/index.json'
- ];
-
- for (var i = 0; i < paths.length; i++) {
- var path = paths[i];
- if (require.modules.hasOwnProperty(path)) return path;
- }
-
- if (require.aliases.hasOwnProperty(index)) {
- return require.aliases[index];
- }
-};
-
-/**
- * Normalize `path` relative to the current path.
- *
- * @param {String} curr
- * @param {String} path
- * @return {String}
- * @api private
- */
-
-require.normalize = function(curr, path) {
- var segs = [];
-
- if ('.' != path.charAt(0)) return path;
-
- curr = curr.split('/');
- path = path.split('/');
-
- for (var i = 0; i < path.length; ++i) {
- if ('..' == path[i]) {
- curr.pop();
- } else if ('.' != path[i] && '' != path[i]) {
- segs.push(path[i]);
- }
- }
-
- return curr.concat(segs).join('/');
-};
-
-/**
- * Register module at `path` with callback `definition`.
- *
- * @param {String} path
- * @param {Function} definition
- * @api private
- */
-
-require.register = function(path, definition) {
- require.modules[path] = definition;
-};
-
-/**
- * Alias a module definition.
- *
- * @param {String} from
- * @param {String} to
- * @api private
- */
-
-require.alias = function(from, to) {
- if (!require.modules.hasOwnProperty(from)) {
- throw new Error('Failed to alias "' + from + '", it does not exist');
- }
- require.aliases[to] = from;
-};
-
-/**
- * Return a require function relative to the `parent` path.
- *
- * @param {String} parent
- * @return {Function}
- * @api private
- */
-
-require.relative = function(parent) {
- var p = require.normalize(parent, '..');
-
- /**
- * lastIndexOf helper.
- */
-
- function lastIndexOf(arr, obj) {
- var i = arr.length;
- while (i--) {
- if (arr[i] === obj) return i;
- }
- return -1;
- }
-
- /**
- * The relative require() itself.
- */
-
- function localRequire(path) {
- var resolved = localRequire.resolve(path);
- return require(resolved, parent, path);
- }
-
- /**
- * Resolve relative to the parent.
- */
-
- localRequire.resolve = function(path) {
- var c = path.charAt(0);
- if ('/' == c) return path.slice(1);
- if ('.' == c) return require.normalize(p, path);
-
- // resolve deps by returning
- // the dep in the nearest "deps"
- // directory
- var segs = parent.split('/');
- var i = lastIndexOf(segs, 'deps') + 1;
- if (!i) i = 0;
- path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
- return path;
- };
-
- /**
- * Check if module is defined at `path`.
- */
-
- localRequire.exists = function(path) {
- return require.modules.hasOwnProperty(localRequire.resolve(path));
- };
-
- return localRequire;
-};
-require.register("isarray/index.js", function(exports, require, module){
-module.exports = Array.isArray || function (arr) {
- return Object.prototype.toString.call(arr) == '[object Array]';
-};
-
-});
-require.alias("isarray/index.js", "isarray/index.js");
-
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/readable-stream/node_modules/isarray/index.js
index 5f5ad45d46dda9..a57f63495943a0 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/isarray/index.js
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/index.js
@@ -1,3 +1,5 @@
+var toString = {}.toString;
+
module.exports = Array.isArray || function (arr) {
- return Object.prototype.toString.call(arr) == '[object Array]';
+ return toString.call(arr) == '[object Array]';
};
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/readable-stream/node_modules/isarray/package.json
index fb1eb3786d8168..703ea43cb4d5ac 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/isarray/package.json
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/package.json
@@ -1,19 +1,16 @@
{
"name": "isarray",
"description": "Array#isArray for older browsers",
- "version": "0.0.1",
+ "version": "1.0.0",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/isarray.git"
},
"homepage": "https://github.com/juliangruber/isarray",
"main": "index.js",
- "scripts": {
- "test": "tap test/*.js"
- },
"dependencies": {},
"devDependencies": {
- "tap": "*"
+ "tape": "~2.13.4"
},
"keywords": [
"browser",
@@ -26,13 +23,49 @@
"url": "http://juliangruber.com"
},
"license": "MIT",
- "readme": "\n# isarray\n\n`Array#isArray` for older browsers.\n\n## Usage\n\n```js\nvar isArray = require('isarray');\n\nconsole.log(isArray([])); // => true\nconsole.log(isArray({})); // => false\n```\n\n## Installation\n\nWith [npm](http://npmjs.org) do\n\n```bash\n$ npm install isarray\n```\n\nThen bundle for the browser with\n[browserify](https://github.com/substack/browserify).\n\nWith [component](http://component.io) do\n\n```bash\n$ component install juliangruber/isarray\n```\n\n## License\n\n(MIT)\n\nCopyright (c) 2013 Julian Gruber <julian@juliangruber.com>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
- "readmeFilename": "README.md",
+ "testling": {
+ "files": "test.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/17..latest",
+ "firefox/nightly",
+ "chrome/22..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ },
+ "scripts": {
+ "test": "tape test.js"
+ },
+ "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33",
"bugs": {
"url": "https://github.com/juliangruber/isarray/issues"
},
- "_id": "isarray@0.0.1",
- "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
- "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "_from": "isarray@0.0.1"
+ "_id": "isarray@1.0.0",
+ "_shasum": "bb935d48582cba168c06834957a54a3e07124f11",
+ "_from": "isarray@>=1.0.0 <1.1.0",
+ "_npmVersion": "3.3.12",
+ "_nodeVersion": "5.1.0",
+ "_npmUser": {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ },
+ "dist": {
+ "shasum": "bb935d48582cba168c06834957a54a3e07124f11",
+ "tarball": "http://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "juliangruber",
+ "email": "julian@juliangruber.com"
+ }
+ ],
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/readable-stream/node_modules/isarray/test.js
new file mode 100644
index 00000000000000..f7f7bcd19fec56
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/isarray/test.js
@@ -0,0 +1,19 @@
+var isArray = require('./');
+var test = require('tape');
+
+test('is array', function(t){
+ t.ok(isArray([]));
+ t.notOk(isArray({}));
+ t.notOk(isArray(null));
+ t.notOk(isArray(false));
+
+ var obj = {};
+ obj[0] = true;
+ t.notOk(isArray(obj));
+
+ var arr = [];
+ arr.foo = 'bar';
+ t.ok(isArray(arr));
+
+ t.end();
+});
diff --git a/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
new file mode 100644
index 00000000000000..36201b10017a5e
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/.travis.yml
@@ -0,0 +1,12 @@
+language: node_js
+node_js:
+ - "0.8"
+ - "0.10"
+ - "0.11"
+ - "0.12"
+ - "1.7.1"
+ - 1
+ - 2
+ - 3
+ - 4
+ - 5
diff --git a/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/index.js
new file mode 100644
index 00000000000000..a4f40f845faa65
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/index.js
@@ -0,0 +1,43 @@
+'use strict';
+
+if (!process.version ||
+ process.version.indexOf('v0.') === 0 ||
+ process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
+ module.exports = nextTick;
+} else {
+ module.exports = process.nextTick;
+}
+
+function nextTick(fn, arg1, arg2, arg3) {
+ if (typeof fn !== 'function') {
+ throw new TypeError('"callback" argument must be a function');
+ }
+ var len = arguments.length;
+ var args, i;
+ switch (len) {
+ case 0:
+ case 1:
+ return process.nextTick(fn);
+ case 2:
+ return process.nextTick(function afterTickOne() {
+ fn.call(null, arg1);
+ });
+ case 3:
+ return process.nextTick(function afterTickTwo() {
+ fn.call(null, arg1, arg2);
+ });
+ case 4:
+ return process.nextTick(function afterTickThree() {
+ fn.call(null, arg1, arg2, arg3);
+ });
+ default:
+ args = new Array(len - 1);
+ i = 0;
+ while (i < args.length) {
+ args[i++] = arguments[i];
+ }
+ return process.nextTick(function afterTick() {
+ fn.apply(null, args);
+ });
+ }
+}
diff --git a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/LICENSE b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/license.md
similarity index 74%
rename from deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/LICENSE
rename to deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/license.md
index bcbe13d67a9621..c67e3532b54245 100644
--- a/deps/npm/node_modules/npmlog/node_modules/gauge/node_modules/lodash.padend/node_modules/lodash.repeat/LICENSE
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/license.md
@@ -1,8 +1,4 @@
-The MIT License (MIT)
-
-Copyright 2012-2016 The Dojo Foundation
-Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
-DocumentCloud and Investigative Reporters & Editors
+# Copyright (c) 2015 Calvin Metcalf
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -14,10 +10,10 @@ furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE.**
diff --git a/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/package.json
new file mode 100644
index 00000000000000..211b098d4cbb95
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/package.json
@@ -0,0 +1,49 @@
+{
+ "name": "process-nextick-args",
+ "version": "1.0.7",
+ "description": "process.nextTick but always with args",
+ "main": "index.js",
+ "scripts": {
+ "test": "node test.js"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/calvinmetcalf/process-nextick-args.git"
+ },
+ "author": "",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/calvinmetcalf/process-nextick-args/issues"
+ },
+ "homepage": "https://github.com/calvinmetcalf/process-nextick-args",
+ "devDependencies": {
+ "tap": "~0.2.6"
+ },
+ "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39",
+ "_id": "process-nextick-args@1.0.7",
+ "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3",
+ "_from": "process-nextick-args@>=1.0.6 <1.1.0",
+ "_npmVersion": "3.8.6",
+ "_nodeVersion": "5.11.0",
+ "_npmUser": {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "dist": {
+ "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3",
+ "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ }
+ ],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/readme.md b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
new file mode 100644
index 00000000000000..78e7cfaeb7acde
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/readme.md
@@ -0,0 +1,18 @@
+process-nextick-args
+=====
+
+[](https://travis-ci.org/calvinmetcalf/process-nextick-args)
+
+```bash
+npm install --save process-nextick-args
+```
+
+Always be able to pass arguments to process.nextTick, no matter the platform
+
+```js
+var nextTick = require('process-nextick-args');
+
+nextTick(function (a, b, c) {
+ console.log(a, b, c);
+}, 'step', 3, 'profit');
+```
diff --git a/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/test.js b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/test.js
new file mode 100644
index 00000000000000..ef15721584ac99
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/process-nextick-args/test.js
@@ -0,0 +1,24 @@
+var test = require("tap").test;
+var nextTick = require('./');
+
+test('should work', function (t) {
+ t.plan(5);
+ nextTick(function (a) {
+ t.ok(a);
+ nextTick(function (thing) {
+ t.equals(thing, 7);
+ }, 7);
+ }, true);
+ nextTick(function (a, b, c) {
+ t.equals(a, 'step');
+ t.equals(b, 3);
+ t.equals(c, 'profit');
+ }, 'step', 3, 'profit');
+});
+
+test('correct number of arguments', function (t) {
+ t.plan(1);
+ nextTick(function () {
+ t.equals(2, arguments.length, 'correct number');
+ }, 1, 2);
+});
diff --git a/deps/npm/node_modules/readable-stream/node_modules/string_decoder/package.json b/deps/npm/node_modules/readable-stream/node_modules/string_decoder/package.json
index ee70702359198d..0364d54ba46af6 100644
--- a/deps/npm/node_modules/readable-stream/node_modules/string_decoder/package.json
+++ b/deps/npm/node_modules/readable-stream/node_modules/string_decoder/package.json
@@ -22,13 +22,33 @@
"browserify"
],
"license": "MIT",
- "readme": "**string_decoder.js** (`require('string_decoder')`) from Node.js core\n\nCopyright Joyent, Inc. and other Node contributors. See LICENCE file for details.\n\nVersion numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**\n\nThe *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.",
- "readmeFilename": "README.md",
+ "gitHead": "d46d4fd87cf1d06e031c23f1ba170ca7d4ade9a0",
"bugs": {
"url": "https://github.com/rvagg/string_decoder/issues"
},
"_id": "string_decoder@0.10.31",
"_shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
+ "_from": "string_decoder@>=0.10.0 <0.11.0",
+ "_npmVersion": "1.4.23",
+ "_npmUser": {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ },
+ "maintainers": [
+ {
+ "name": "substack",
+ "email": "mail@substack.net"
+ },
+ {
+ "name": "rvagg",
+ "email": "rod@vagg.org"
+ }
+ ],
+ "dist": {
+ "shasum": "62e203bc41766c6c28c9fc84301dab1c5310fa94",
+ "tarball": "http://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
+ },
+ "directories": {},
"_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
- "_from": "string_decoder@>=0.10.0 <0.11.0"
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/History.md b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/History.md
new file mode 100644
index 00000000000000..acc8675372e980
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/History.md
@@ -0,0 +1,16 @@
+
+1.0.2 / 2015-10-07
+==================
+
+ * use try/catch when checking `localStorage` (#3, @kumavis)
+
+1.0.1 / 2014-11-25
+==================
+
+ * browser: use `console.warn()` for deprecation calls
+ * browser: more jsdocs
+
+1.0.0 / 2014-04-30
+==================
+
+ * initial commit
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/LICENSE b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
new file mode 100644
index 00000000000000..6a60e8c225c9ba
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/README.md b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/README.md
new file mode 100644
index 00000000000000..75622fa7c250a6
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/README.md
@@ -0,0 +1,53 @@
+util-deprecate
+==============
+### The Node.js `util.deprecate()` function with browser support
+
+In Node.js, this module simply re-exports the `util.deprecate()` function.
+
+In the web browser (i.e. via browserify), a browser-specific implementation
+of the `util.deprecate()` function is used.
+
+
+## API
+
+A `deprecate()` function is the only thing exposed by this module.
+
+``` javascript
+// setup:
+exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead');
+
+
+// users see:
+foo();
+// foo() is deprecated, use bar() instead
+foo();
+foo();
+```
+
+
+## License
+
+(The MIT License)
+
+Copyright (c) 2014 Nathan Rajlich
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/browser.js b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/browser.js
new file mode 100644
index 00000000000000..549ae2f065ea5a
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/browser.js
@@ -0,0 +1,67 @@
+
+/**
+ * Module exports.
+ */
+
+module.exports = deprecate;
+
+/**
+ * Mark that a method should not be used.
+ * Returns a modified function which warns once by default.
+ *
+ * If `localStorage.noDeprecation = true` is set, then it is a no-op.
+ *
+ * If `localStorage.throwDeprecation = true` is set, then deprecated functions
+ * will throw an Error when invoked.
+ *
+ * If `localStorage.traceDeprecation = true` is set, then deprecated functions
+ * will invoke `console.trace()` instead of `console.error()`.
+ *
+ * @param {Function} fn - the function to deprecate
+ * @param {String} msg - the string to print to the console when `fn` is invoked
+ * @returns {Function} a new "deprecated" version of `fn`
+ * @api public
+ */
+
+function deprecate (fn, msg) {
+ if (config('noDeprecation')) {
+ return fn;
+ }
+
+ var warned = false;
+ function deprecated() {
+ if (!warned) {
+ if (config('throwDeprecation')) {
+ throw new Error(msg);
+ } else if (config('traceDeprecation')) {
+ console.trace(msg);
+ } else {
+ console.warn(msg);
+ }
+ warned = true;
+ }
+ return fn.apply(this, arguments);
+ }
+
+ return deprecated;
+}
+
+/**
+ * Checks `localStorage` for boolean values for the given `name`.
+ *
+ * @param {String} name
+ * @returns {Boolean}
+ * @api private
+ */
+
+function config (name) {
+ // accessing global.localStorage can trigger a DOMException in sandboxed iframes
+ try {
+ if (!global.localStorage) return false;
+ } catch (_) {
+ return false;
+ }
+ var val = global.localStorage[name];
+ if (null == val) return false;
+ return String(val).toLowerCase() === 'true';
+}
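A small sketch of the `localStorage` flags documented in the JSDoc above, assuming a browserify bundle where `localStorage` is available. Note that the flag has to be set before `deprecate()` wraps the function, because the check happens at wrap time:

```js
var deprecate = require('util-deprecate');

// Flag is read when deprecate() wraps fn, not when the wrapped fn runs.
localStorage.noDeprecation = 'true';

var oldFn = deprecate(function () { return 42; }, 'oldFn() is deprecated');
oldFn(); // returns 42 with no console.warn, because noDeprecation was set
```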
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/node.js b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/node.js
new file mode 100644
index 00000000000000..5e6fcff5ddd3fb
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/node.js
@@ -0,0 +1,6 @@
+
+/**
+ * For Node.js, simply re-export the core `util.deprecate` function.
+ */
+
+module.exports = require('util').deprecate;
diff --git a/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/package.json b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/package.json
new file mode 100644
index 00000000000000..ae0c70f6c633f1
--- /dev/null
+++ b/deps/npm/node_modules/readable-stream/node_modules/util-deprecate/package.json
@@ -0,0 +1,54 @@
+{
+ "name": "util-deprecate",
+ "version": "1.0.2",
+ "description": "The Node.js `util.deprecate()` function with browser support",
+ "main": "node.js",
+ "browser": "browser.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/util-deprecate.git"
+ },
+ "keywords": [
+ "util",
+ "deprecate",
+ "browserify",
+ "browser",
+ "node"
+ ],
+ "author": {
+ "name": "Nathan Rajlich",
+ "email": "nathan@tootallnate.net",
+ "url": "http://n8.io/"
+ },
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/util-deprecate/issues"
+ },
+ "homepage": "https://github.com/TooTallNate/util-deprecate",
+ "gitHead": "475fb6857cd23fafff20c1be846c1350abf8e6d4",
+ "_id": "util-deprecate@1.0.2",
+ "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf",
+ "_from": "util-deprecate@>=1.0.1 <1.1.0",
+ "_npmVersion": "2.14.4",
+ "_nodeVersion": "4.1.2",
+ "_npmUser": {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ },
+ "maintainers": [
+ {
+ "name": "tootallnate",
+ "email": "nathan@tootallnate.net"
+ }
+ ],
+ "dist": {
+ "shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf",
+ "tarball": "http://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/readable-stream/package.json b/deps/npm/node_modules/readable-stream/package.json
index fbc5e7fa4983be..745c4d8ef25272 100644
--- a/deps/npm/node_modules/readable-stream/package.json
+++ b/deps/npm/node_modules/readable-stream/package.json
@@ -1,23 +1,33 @@
{
"name": "readable-stream",
- "version": "1.1.13",
- "description": "Streams3, a user-land copy of the stream library from Node.js v0.11.x",
+ "version": "2.1.2",
+ "description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"dependencies": {
"core-util-is": "~1.0.0",
- "isarray": "0.0.1",
+ "inherits": "~2.0.1",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~1.0.6",
"string_decoder": "~0.10.x",
- "inherits": "~2.0.1"
+ "util-deprecate": "~1.0.1"
},
"devDependencies": {
- "tap": "~0.2.6"
+ "nyc": "^6.4.0",
+ "tap": "~0.7.1",
+ "tape": "~4.5.1",
+ "zuul": "~3.9.0"
},
"scripts": {
- "test": "tap test/simple/*.js"
+ "test": "tap test/parallel/*.js test/ours/*.js",
+ "browser": "npm run write-zuul && zuul -- test/browser.js",
+ "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml",
+ "local": "zuul --local -- test/browser.js",
+ "cover": "nyc npm test",
+ "report": "nyc report --reporter=lcov"
},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/readable-stream.git"
+ "url": "git://github.com/nodejs/readable-stream.git"
},
"keywords": [
"readable",
@@ -27,29 +37,33 @@
"browser": {
"util": false
},
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
+ "nyc": {
+ "include": [
+ "lib/**.js"
+ ]
},
"license": "MIT",
- "gitHead": "3b672fd7ae92acf5b4ffdbabf74b372a0a56b051",
+ "gitHead": "06754eed4f2b882b589f8667ecc8aadcf916045f",
"bugs": {
- "url": "https://github.com/isaacs/readable-stream/issues"
+ "url": "https://github.com/nodejs/readable-stream/issues"
},
- "homepage": "https://github.com/isaacs/readable-stream",
- "_id": "readable-stream@1.1.13",
- "_shasum": "f6eef764f514c89e2b9e23146a75ba106756d23e",
- "_from": "readable-stream@>=1.1.13 <1.2.0",
- "_npmVersion": "1.4.23",
+ "_id": "readable-stream@2.1.2",
+ "_shasum": "a92b6e854f13ff0685e4ca7dce6cf73d3e319422",
+ "_from": "readable-stream@2.1.2",
+ "_npmVersion": "3.8.3",
+ "_nodeVersion": "5.10.1",
"_npmUser": {
- "name": "rvagg",
- "email": "rod@vagg.org"
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
+ },
+ "dist": {
+ "shasum": "a92b6e854f13ff0685e4ca7dce6cf73d3e319422",
+ "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.2.tgz"
},
"maintainers": [
{
"name": "isaacs",
- "email": "i@izs.me"
+ "email": "isaacs@npmjs.com"
},
{
"name": "tootallnate",
@@ -58,13 +72,18 @@
{
"name": "rvagg",
"email": "rod@vagg.org"
+ },
+ {
+ "name": "cwmma",
+ "email": "calvin.metcalf@gmail.com"
}
],
- "dist": {
- "shasum": "f6eef764f514c89e2b9e23146a75ba106756d23e",
- "tarball": "http://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz"
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/readable-stream-2.1.2.tgz_1461933796258_0.5075750169344246"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.13.tgz",
- "readme": "ERROR: No README data found!"
+ "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.2.tgz",
+ "readme": "ERROR: No README data found!",
+ "homepage": "https://github.com/nodejs/readable-stream#readme"
}
diff --git a/deps/npm/node_modules/readable-stream/readable.js b/deps/npm/node_modules/readable-stream/readable.js
index 09b8bf5091a82e..be2688a071dd18 100644
--- a/deps/npm/node_modules/readable-stream/readable.js
+++ b/deps/npm/node_modules/readable-stream/readable.js
@@ -1,7 +1,16 @@
+var Stream = (function (){
+ try {
+ return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
+ } catch(_){}
+}());
exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = require('stream');
+exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
+
+if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) {
+ module.exports = Stream;
+}
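The new guard at the bottom of `readable.js` adds an opt-out: outside the browser, setting `READABLE_STREAM=disable` in the environment makes the package hand back Node's core `stream` module instead of the user-land copy. A quick way to observe it (script name is illustrative):

```js
// Run as: READABLE_STREAM=disable node check.js
var rs = require('readable-stream');
var core = require('stream');

console.log(rs === core); // true when the env var is set, false otherwise
```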
diff --git a/deps/npm/node_modules/request/.travis.yml b/deps/npm/node_modules/request/.travis.yml
index c24c59b5d5b4fa..e5d9bde26263c5 100644
--- a/deps/npm/node_modules/request/.travis.yml
+++ b/deps/npm/node_modules/request/.travis.yml
@@ -1,15 +1,19 @@
language: node_js
+
node_js:
- node
- - io.js
- 0.12
- 0.10
-sudo: false
-after_script: "npm run test-cov && cat ./coverage/lcov.info | codecov && cat ./coverage/lcov.info | coveralls"
+after_script:
+ - npm run test-cov
+ - cat ./coverage/lcov.info | codecov
+ - cat ./coverage/lcov.info | coveralls
webhooks:
urls: https://webhooks.gitter.im/e/237280ed4796c19cc626
on_success: change # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: false # default: false
+
+sudo: false
diff --git a/deps/npm/node_modules/request/CHANGELOG.md b/deps/npm/node_modules/request/CHANGELOG.md
index 86386a9ae5cba4..ce6826f2f65228 100644
--- a/deps/npm/node_modules/request/CHANGELOG.md
+++ b/deps/npm/node_modules/request/CHANGELOG.md
@@ -1,5 +1,34 @@
## Change Log
+### v2.72.0 (2016/04/17)
+- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov)
+- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou)
+- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz)
+- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov)
+- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov)
+- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo)
+
+### v2.71.0 (2016/04/12)
+- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov)
+
+### v2.70.0 (2016/04/05)
+- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot)
+- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. (@elyobo)
+- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot)
+- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot)
+- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot)
+- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov)
+- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot)
+- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov)
+- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot)
+- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov)
+- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm)
+- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov)
+- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot)
+
+### v2.69.0 (2016/01/27)
+- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg)
+
### v2.68.0 (2016/01/27)
- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods)
- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross)
@@ -85,7 +114,8 @@
- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews)
### v2.59.0 (2015/07/20)
-- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options.
Forever option defaults to using http(s).Agent in node 0.12+ (@simov)
+- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options.
+ Forever option defaults to using http(s).Agent in node 0.12+ (@simov)
- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman)
- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme)
- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov)
@@ -470,7 +500,7 @@
- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway421)
- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka)
-### v2.18.0 (2013/04/22)
+### v2.20.0 (2013/04/22)
- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway421)
- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway421)
- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun)
diff --git a/deps/npm/node_modules/request/CONTRIBUTING.md b/deps/npm/node_modules/request/CONTRIBUTING.md
index 06b1968d974b7e..8aa6999acd820e 100644
--- a/deps/npm/node_modules/request/CONTRIBUTING.md
+++ b/deps/npm/node_modules/request/CONTRIBUTING.md
@@ -1,19 +1,57 @@
-# This is an OPEN Open Source Project
+
+# Contributing to Request
+
+:+1::tada: First off, thanks for taking the time to contribute! :tada::+1:
+
+The following is a set of guidelines for contributing to Request and its packages, which are hosted in the [Request Organization](https://github.com/request) on GitHub.
+These are just guidelines, not rules; use your best judgment and feel free to propose changes to this document in a pull request.
+
+
+## Submitting an Issue
+
+1. Provide a small, self-**sufficient** code example to **reproduce** the issue.
+2. Run your test code using [request-debug](https://github.com/request/request-debug) and copy/paste the results inside the issue.
+3. You should **always** use fenced code blocks when submitting code examples or any other formatted output:
+
+ ```js
+ put your javascript code here
+ ```
+
+ ```
+ put any other formatted output here,
+ like for example the one returned from using request-debug
+ ```
+
+
+If the problem cannot be reliably reproduced, the issue will be marked as `Not enough info (see CONTRIBUTING.md)`.
+
+If the problem is not related to request, the issue will be marked as `Help (please use Stackoverflow)`.
+
+
+## Submitting a Pull Request
+
+1. In almost all cases your PR **needs tests**. Make sure you have some.
+2. Run `npm test` locally. Fix any errors before pushing to GitHub.
+3. After submitting the PR, a build will be triggered on TravisCI. Wait for it to end and make sure all jobs are passing.
+
-----------------------------------------
-## What?
+
+## Becoming a Contributor
Individuals making significant and valuable contributions are given
commit-access to the project to contribute as they see fit. This project is
more like an open wiki than a standard guarded open source project.
+
## Rules
There are a few basic ground-rules for contributors:
1. **No `--force` pushes** or modifying the Git history in any way.
1. **Non-master branches** ought to be used for ongoing work.
+1. **Any** change should be added through a Pull Request.
1. **External API changes and significant modifications** ought to be subject
to an **internal pull-request** to solicit feedback from other contributors.
1. Internal pull-requests to solicit feedback are *encouraged* for any other
@@ -35,10 +73,9 @@ There are a few basic ground-rules for contributors:
Declaring formal releases remains the prerogative of the project maintainer.
+
## Changes to this arrangement
This is an experiment and feedback is welcome! This document may also be
subject to pull-requests or changes by contributors where you believe you have
something valuable to add or change.
-
------------------------------------------
diff --git a/deps/npm/node_modules/request/README.md b/deps/npm/node_modules/request/README.md
index 6ee45b205d5473..cf9072a21a229a 100644
--- a/deps/npm/node_modules/request/README.md
+++ b/deps/npm/node_modules/request/README.md
@@ -3,10 +3,11 @@
[](https://nodei.co/npm/request/)
-[](https://travis-ci.org/request/request)
+[](https://travis-ci.org/request/request)
[](https://codecov.io/github/request/request?branch=master)
[](https://coveralls.io/r/request/request)
[](https://david-dm.org/request/request)
+[](https://snyk.io/test/npm/request)
[](https://gitter.im/request/request?utm_source=badge)
@@ -733,7 +734,7 @@ The first argument can be either a `url` or an `options` object. The only requir
---
-- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer` or `String`, unless `json` is `true`. If `json` is `true`, then `body` must be a JSON-serializable object.
+- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object.
- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See "Forms" section above.
- `formData` - Data to pass for a `multipart/form-data` request. See
[Forms](#forms) section above.
@@ -748,6 +749,7 @@ The first argument can be either a `url` or an `options` object. The only requir
- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request.
- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON.
- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body.
+- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body.
---
@@ -812,6 +814,7 @@ default in Linux can be anywhere from 20-120 seconds][linux-timeout]).
- `time` - If `true`, the request-response cycle (including all redirects) is timed at millisecond resolution, and the result provided on the response's `elapsedTime` property.
- `har` - A [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-1.2) for details)*
+- `callback` - alternatively pass the request's callback in the options object
The callback argument gets 3 arguments:
@@ -886,12 +889,13 @@ Same as `request()`, but defaults to `method: "HEAD"`.
request.head(url)
```
-### request.del
+### request.del / request.delete
Same as `request()`, but defaults to `method: "DELETE"`.
```js
request.del(url)
+request.delete(url)
```
### request.get
diff --git a/deps/npm/node_modules/request/index.js b/deps/npm/node_modules/request/index.js
index 4d0c748da585ea..911a90dbb5a5cb 100755
--- a/deps/npm/node_modules/request/index.js
+++ b/deps/npm/node_modules/request/index.js
@@ -37,7 +37,7 @@ function initParams(uri, options, callback) {
extend(params, uri)
}
- params.callback = callback
+ params.callback = callback || params.callback
return params
}
@@ -56,7 +56,7 @@ function request (uri, options, callback) {
}
function verbFunc (verb) {
- var method = verb === 'del' ? 'DELETE' : verb.toUpperCase()
+ var method = verb.toUpperCase()
return function (uri, options, callback) {
var params = initParams(uri, options, callback)
params.method = method
@@ -70,7 +70,8 @@ request.head = verbFunc('head')
request.post = verbFunc('post')
request.put = verbFunc('put')
request.patch = verbFunc('patch')
-request.del = verbFunc('del')
+request.del = verbFunc('delete')
+request['delete'] = verbFunc('delete')
request.jar = function (store) {
return cookies.jar(store)
@@ -91,7 +92,7 @@ function wrapRequestMethod (method, options, requester, verb) {
target.pool = params.pool || options.pool
if (verb) {
- target.method = (verb === 'del' ? 'DELETE' : verb.toUpperCase())
+ target.method = verb.toUpperCase()
}
if (isFunction(requester)) {
@@ -114,7 +115,7 @@ request.defaults = function (options, requester) {
var defaults = wrapRequestMethod(self, options, requester)
- var verbs = ['get', 'head', 'post', 'put', 'patch', 'del']
+ var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete']
verbs.forEach(function(verb) {
defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb)
})
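With `verbFunc('delete')` wired up above, `request.delete` becomes a first-class alias for `request.del`, and both issue an HTTP DELETE. A hedged usage sketch (the URL is a placeholder):

```js
var request = require('request');

// Both request.del() and request.delete() send a DELETE; 'delete' is the new alias.
request.delete('https://example.com/resource/1', function (err, res) {
  if (err) return console.error(err);
  console.log('delete via alias:', res.statusCode);
});
```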
diff --git a/deps/npm/node_modules/request/lib/helpers.js b/deps/npm/node_modules/request/lib/helpers.js
index 5e8594606f9f4e..356ff748e2f941 100644
--- a/deps/npm/node_modules/request/lib/helpers.js
+++ b/deps/npm/node_modules/request/lib/helpers.js
@@ -24,12 +24,12 @@ function paramsHaveRequestBody(params) {
)
}
-function safeStringify (obj) {
+function safeStringify (obj, replacer) {
var ret
try {
- ret = JSON.stringify(obj)
+ ret = JSON.stringify(obj, replacer)
} catch (e) {
- ret = jsonSafeStringify(obj)
+ ret = jsonSafeStringify(obj, replacer)
}
return ret
}
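`safeStringify()` now threads a replacer through to `JSON.stringify()`, which backs the new `jsonReplacer` option documented in the README hunk above. A sketch of how a caller might use it (URL and field names are illustrative):

```js
var request = require('request');

request.post({
  url: 'https://example.com/api',
  json: true,
  body: { name: 'npm', password: 'hunter2' },
  // Passed through safeStringify() to JSON.stringify(); strips secrets here.
  jsonReplacer: function (key, value) {
    return key === 'password' ? undefined : value;
  }
}, function (err, res, body) {
  if (err) return console.error(err);
  console.log(body);
});
```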
diff --git a/deps/npm/node_modules/request/node_modules/aws4/.npmignore b/deps/npm/node_modules/request/node_modules/aws4/.npmignore
index ccafa6b412bff2..6c6ade6ff0135c 100644
--- a/deps/npm/node_modules/request/node_modules/aws4/.npmignore
+++ b/deps/npm/node_modules/request/node_modules/aws4/.npmignore
@@ -1,3 +1,4 @@
test
-coverage
examples
+example.js
+browser
diff --git a/deps/npm/node_modules/request/node_modules/aws4/.tern-port b/deps/npm/node_modules/request/node_modules/aws4/.tern-port
new file mode 100644
index 00000000000000..7fd1b5223deb90
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/aws4/.tern-port
@@ -0,0 +1 @@
+62638
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/aws4/README.md b/deps/npm/node_modules/request/node_modules/aws4/README.md
index 068a07a8359617..ec663e9237ec5e 100644
--- a/deps/npm/node_modules/request/node_modules/aws4/README.md
+++ b/deps/npm/node_modules/request/node_modules/aws4/README.md
@@ -6,6 +6,8 @@ aws4
A small utility to sign vanilla node.js http(s) request options using Amazon's
[AWS Signature Version 4](http://docs.amazonwebservices.com/general/latest/gr/signature-version-4.html).
+Can also be used [in the browser](./browser).
+
This signature is supported by nearly all Amazon services, including
[S3](http://docs.aws.amazon.com/AmazonS3/latest/API/),
[EC2](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/),
@@ -497,6 +499,8 @@ With [npm](http://npmjs.org/) do:
npm install aws4
```
+Can also be used [in the browser](./browser).
+
Thanks
------
diff --git a/deps/npm/node_modules/request/node_modules/aws4/aws4.js b/deps/npm/node_modules/request/node_modules/aws4/aws4.js
index 3ef54a468fc904..71e08bbc17c9d1 100644
--- a/deps/npm/node_modules/request/node_modules/aws4/aws4.js
+++ b/deps/npm/node_modules/request/node_modules/aws4/aws4.js
@@ -43,8 +43,13 @@ function RequestSigner(request, credentials) {
if (!request.method && request.body)
request.method = 'POST'
- if (!headers.Host && !headers.host)
+ if (!headers.Host && !headers.host) {
headers.Host = request.hostname || request.host || this.createHost()
+
+ // If a port is specified explicitly, use it as is
+ if (request.port)
+ headers.Host += ':' + request.port
+ }
if (!request.hostname && !request.host)
request.hostname = headers.Host || headers.host
}
@@ -210,8 +215,7 @@ RequestSigner.prototype.canonicalString = function() {
}
if (pathStr !== '/') {
if (normalizePath) pathStr = pathStr.replace(/\/{2,}/g, '/')
- if (pathStr[0] === '/') pathStr = pathStr.slice(1)
- pathStr = '/' + pathStr.split('/').reduce(function(path, piece) {
+ pathStr = pathStr.split('/').reduce(function(path, piece) {
if (normalizePath && piece === '..') {
path.pop()
} else if (!normalizePath || piece !== '.') {
@@ -220,6 +224,7 @@ RequestSigner.prototype.canonicalString = function() {
}
return path
}, []).join('/')
+ if (pathStr[0] !== '/') pathStr = '/' + pathStr
if (decodeSlashesInPath) pathStr = pathStr.replace(/%2F/g, '/')
}
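The first hunk above keeps an explicitly supplied port in the signed `Host` header, which matters when targeting non-standard endpoints such as local AWS emulators. A minimal sketch (host, port, and credentials are placeholders):

```js
var aws4 = require('aws4');

var opts = { host: 'localhost', port: 4569, service: 's3', path: '/my-bucket' };

// sign() mutates opts, adding Authorization, X-Amz-Date, and a Host header
// that now preserves the explicit port.
aws4.sign(opts, { accessKeyId: 'AKID', secretAccessKey: 'SECRET' });
console.log(opts.headers.Host); // 'localhost:4569'
```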
diff --git a/deps/npm/node_modules/request/node_modules/aws4/example.js b/deps/npm/node_modules/request/node_modules/aws4/example.js
deleted file mode 100644
index 5d075409d5de95..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/example.js
+++ /dev/null
@@ -1,371 +0,0 @@
-var http = require('http'),
- https = require('https'),
- aws4 = require('aws4')
-
-// given an options object you could pass to http.request
-var opts = {host: 'sqs.us-east-1.amazonaws.com', path: '/?Action=ListQueues'}
-
-// alternatively (as aws4 can infer the host):
-opts = {service: 'sqs', region: 'us-east-1', path: '/?Action=ListQueues'}
-
-// alternatively (as us-east-1 is default):
-opts = {service: 'sqs', path: '/?Action=ListQueues'}
-
-aws4.sign(opts) // assumes AWS credentials are available in process.env
-
-console.log(opts)
-/*
-{
- host: 'sqs.us-east-1.amazonaws.com',
- path: '/?Action=ListQueues',
- headers: {
- Host: 'sqs.us-east-1.amazonaws.com',
- 'X-Amz-Date': '20121226T061030Z',
- Authorization: 'AWS4-HMAC-SHA256 Credential=ABCDEF/20121226/us-east-1/sqs/aws4_request, ...'
- }
-}
-*/
-
-// we can now use this to query AWS using the standard node.js http API
-http.request(opts, function(res) { res.pipe(process.stdout) }).end()
-/*
-
-
-...
-*/
-
-// you can also pass AWS credentials in explicitly (otherwise taken from process.env)
-aws4.sign(opts, {accessKeyId: '', secretAccessKey: ''})
-
-// can also add the signature to query strings
-aws4.sign({service: 's3', path: '/my-bucket?X-Amz-Expires=12345', signQuery: true})
-
-// create a utility function to pipe to stdout (with https this time)
-function request(o) { https.request(o, function(res) { res.pipe(process.stdout) }).end(o.body || '') }
-
-// aws4 can infer the HTTP method if a body is passed in
-// method will be POST and Content-Type: 'application/x-www-form-urlencoded; charset=utf-8'
-request(aws4.sign({service: 'iam', body: 'Action=ListGroups&Version=2010-05-08'}))
-/*
-
-...
-*/
-
-// can specify any custom option or header as per usual
-request(aws4.sign({
- service: 'dynamodb',
- region: 'ap-southeast-2',
- method: 'POST',
- path: '/',
- headers: {
- 'Content-Type': 'application/x-amz-json-1.0',
- 'X-Amz-Target': 'DynamoDB_20120810.ListTables'
- },
- body: '{}'
-}))
-/*
-{"TableNames":[]}
-...
-*/
-
-// works with all other services that support Signature Version 4
-
-request(aws4.sign({service: 's3', path: '/', signQuery: true}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'ec2', path: '/?Action=DescribeRegions&Version=2014-06-15'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'sns', path: '/?Action=ListTopics&Version=2010-03-31'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'sts', path: '/?Action=GetSessionToken&Version=2011-06-15'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'cloudsearch', path: '/?Action=ListDomainNames&Version=2013-01-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'ses', path: '/?Action=ListIdentities&Version=2010-12-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'autoscaling', path: '/?Action=DescribeAutoScalingInstances&Version=2011-01-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'elasticloadbalancing', path: '/?Action=DescribeLoadBalancers&Version=2012-06-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'cloudformation', path: '/?Action=ListStacks&Version=2010-05-15'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'elasticbeanstalk', path: '/?Action=ListAvailableSolutionStacks&Version=2010-12-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'rds', path: '/?Action=DescribeDBInstances&Version=2012-09-17'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'monitoring', path: '/?Action=ListMetrics&Version=2010-08-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'redshift', path: '/?Action=DescribeClusters&Version=2012-12-01'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'cloudfront', path: '/2014-05-31/distribution'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'elasticache', path: '/?Action=DescribeCacheClusters&Version=2014-07-15'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'elasticmapreduce', path: '/?Action=DescribeJobFlows&Version=2009-03-31'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'route53', path: '/2013-04-01/hostedzone'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'appstream', path: '/applications'}))
-/*
-{"_links":{"curie":[{"href":"http://docs.aws.amazon.com/appstream/latest/...
-...
-*/
-
-request(aws4.sign({service: 'cognito-sync', path: '/identitypools'}))
-/*
-{"Count":0,"IdentityPoolUsages":[],"MaxResults":16,"NextToken":null}
-...
-*/
-
-request(aws4.sign({service: 'elastictranscoder', path: '/2012-09-25/pipelines'}))
-/*
-{"NextPageToken":null,"Pipelines":[]}
-...
-*/
-
-request(aws4.sign({service: 'lambda', path: '/2014-11-13/functions/'}))
-/*
-{"Functions":[],"NextMarker":null}
-...
-*/
-
-request(aws4.sign({service: 'ecs', path: '/?Action=ListClusters&Version=2014-11-13'}))
-/*
-
-...
-*/
-
-request(aws4.sign({service: 'glacier', path: '/-/vaults', headers: {'X-Amz-Glacier-Version': '2012-06-01'}}))
-/*
-{"Marker":null,"VaultList":[]}
-...
-*/
-
-request(aws4.sign({service: 'storagegateway', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'StorageGateway_20120630.ListGateways'
-}}))
-/*
-{"Gateways":[]}
-...
-*/
-
-request(aws4.sign({service: 'datapipeline', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'DataPipeline.ListPipelines'
-}}))
-/*
-{"hasMoreResults":false,"pipelineIdList":[]}
-...
-*/
-
-request(aws4.sign({service: 'opsworks', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'OpsWorks_20130218.DescribeStacks'
-}}))
-/*
-{"Stacks":[]}
-...
-*/
-
-request(aws4.sign({service: 'route53domains', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'Route53Domains_v20140515.ListDomains'
-}}))
-/*
-{"Domains":[]}
-...
-*/
-
-request(aws4.sign({service: 'kinesis', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'Kinesis_20131202.ListStreams'
-}}))
-/*
-{"HasMoreStreams":false,"StreamNames":[]}
-...
-*/
-
-request(aws4.sign({service: 'cloudtrail', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'CloudTrail_20131101.DescribeTrails'
-}}))
-/*
-{"trailList":[]}
-...
-*/
-
-request(aws4.sign({service: 'logs', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'Logs_20140328.DescribeLogGroups'
-}}))
-/*
-{"logGroups":[]}
-...
-*/
-
-request(aws4.sign({service: 'codedeploy', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'CodeDeploy_20141006.ListApplications'
-}}))
-/*
-{"applications":[]}
-...
-*/
-
-request(aws4.sign({service: 'directconnect', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'OvertureService.DescribeConnections'
-}}))
-/*
-{"connections":[]}
-...
-*/
-
-request(aws4.sign({service: 'kms', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'TrentService.ListKeys'
-}}))
-/*
-{"Keys":[],"Truncated":false}
-...
-*/
-
-request(aws4.sign({service: 'config', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'StarlingDoveService.DescribeDeliveryChannels'
-}}))
-/*
-{"DeliveryChannels":[]}
-...
-*/
-
-request(aws4.sign({service: 'cloudhsm', body: '{}', headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'CloudHsmFrontendService.ListAvailableZones'
-}}))
-/*
-{"AZList":["us-east-1a","us-east-1b","us-east-1c"]}
-...
-*/
-
-request(aws4.sign({
- service: 'swf',
- body: '{"registrationStatus":"REGISTERED"}',
- headers: {
- 'Content-Type': 'application/x-amz-json-1.0',
- 'X-Amz-Target': 'SimpleWorkflowService.ListDomains'
- }
-}))
-/*
-{"domainInfos":[]}
-...
-*/
-
-request(aws4.sign({
- service: 'cognito-identity',
- body: '{"MaxResults": 1}',
- headers: {
- 'Content-Type': 'application/x-amz-json-1.1',
- 'X-Amz-Target': 'AWSCognitoIdentityService.ListIdentityPools'
- }
-}))
-/*
-{"IdentityPools":[]}
-...
-*/
-
-request(aws4.sign({
- service: 'mobileanalytics',
- path: '/2014-06-05/events',
- body: JSON.stringify({events:[{
- eventType: 'a',
- timestamp: new Date().toISOString(),
- session: {},
- }]}),
- headers: {
- 'Content-Type': 'application/json',
- 'X-Amz-Client-Context': JSON.stringify({
- client: {client_id: 'a', app_title: 'a'},
- custom: {},
- env: {platform: 'a'},
- services: {},
- }),
- }
-}))
-/*
-(HTTP 202, empty response)
-*/
-
-// Still not updated to v4...
-
-//request(aws4.sign({service: 'importexport', path: '/?Action=ListJobs&Version=2010-06-01'}))
-
-//request(aws4.sign({service: 'sdb', path: '/?Action=ListDomains&Version=2009-04-15'}))
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.npmignore b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.npmignore
deleted file mode 100644
index 07e6e472cc75fa..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-/node_modules
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.travis.yml b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.travis.yml
deleted file mode 100644
index 4af02b3d17e64c..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/.travis.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-language: node_js
-node_js:
- - '0.8'
- - '0.10'
- - '0.12'
- - 'iojs'
-before_install:
- - npm install -g npm@latest
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/CONTRIBUTORS b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/CONTRIBUTORS
deleted file mode 100644
index 4a0bc5033a06e7..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/CONTRIBUTORS
+++ /dev/null
@@ -1,14 +0,0 @@
-# Authors, sorted by whether or not they are me
-Isaac Z. Schlueter
-Brian Cottingham
-Carlos Brito Lage
-Jesse Dailey
-Kevin O'Hara
-Marco Rogers
-Mark Cavage
-Marko Mikulicic
-Nathan Rajlich
-Satheesh Natesan
-Trent Mick
-ashleybrener
-n4kz
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/README.md b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/README.md
deleted file mode 100644
index c06814e0414d56..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/README.md
+++ /dev/null
@@ -1,137 +0,0 @@
-# lru cache
-
-A cache object that deletes the least-recently-used items.
-
-## Usage:
-
-```javascript
-var LRU = require("lru-cache")
- , options = { max: 500
- , length: function (n) { return n * 2 }
- , dispose: function (key, n) { n.close() }
- , maxAge: 1000 * 60 * 60 }
- , cache = LRU(options)
- , otherCache = LRU(50) // sets just the max size
-
-cache.set("key", "value")
-cache.get("key") // "value"
-
-cache.reset() // empty the cache
-```
-
-If you put more stuff in it, then items will fall out.
-
-If you try to put an oversized thing in it, then it'll fall out right
-away.
-
-## Keys should always be Strings or Numbers
-
-Note: this module will print warnings to `console.error` if you use a
-key that is not a String or Number. Because items are stored in an
-object, which coerces keys to a string, it won't go well for you if
-you try to use a key that is not a unique string, it'll cause surprise
-collisions. For example:
-
-```JavaScript
-// Bad Example! Dont' do this!
-var cache = LRU()
-var a = {}
-var b = {}
-cache.set(a, 'this is a')
-cache.set(b, 'this is b')
-console.log(cache.get(a)) // prints: 'this is b'
-```
-
-## Options
-
-* `max` The maximum size of the cache, checked by applying the length
- function to all values in the cache. Not setting this is kind of
- silly, since that's the whole purpose of this lib, but it defaults
- to `Infinity`.
-* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
- as they age, but if you try to get an item that is too old, it'll
- drop it and return undefined instead of giving it to you.
-* `length` Function that is used to calculate the length of stored
- items. If you're storing strings or buffers, then you probably want
- to do something like `function(n){return n.length}`. The default is
- `function(n){return 1}`, which is fine if you want to store `max`
- like-sized things.
-* `dispose` Function that is called on items when they are dropped
- from the cache. This can be handy if you want to close file
- descriptors or do other cleanup tasks when items are no longer
- accessible. Called with `key, value`. It's called *before*
- actually removing the item from the internal cache, so if you want
- to immediately put it back in, you'll have to do that in a
- `nextTick` or `setTimeout` callback or it won't do anything.
-* `stale` By default, if you set a `maxAge`, it'll only actually pull
- stale items out of the cache when you `get(key)`. (That is, it's
- not pre-emptively doing a `setTimeout` or anything.) If you set
- `stale:true`, it'll return the stale value before deleting it. If
- you don't set this, then it'll return `undefined` when you try to
- get a stale entry, as if it had already been deleted.
-
-## API
-
-* `set(key, value, maxAge)`
-* `get(key) => value`
-
- Both of these will update the "recently used"-ness of the key.
- They do what you think. `max` is optional and overrides the
- cache `max` option if provided.
-
-* `peek(key)`
-
- Returns the key value (or `undefined` if not found) without
- updating the "recently used"-ness of the key.
-
- (If you find yourself using this a lot, you *might* be using the
- wrong sort of data structure, but there are some use cases where
- it's handy.)
-
-* `del(key)`
-
- Deletes a key out of the cache.
-
-* `reset()`
-
- Clear the cache entirely, throwing away all values.
-
-* `has(key)`
-
- Check if a key is in the cache, without updating the recent-ness
- or deleting it for being stale.
-
-* `forEach(function(value,key,cache), [thisp])`
-
- Just like `Array.prototype.forEach`. Iterates over all the keys
- in the cache, in order of recent-ness. (Ie, more recently used
- items are iterated over first.)
-
-* `keys()`
-
- Return an array of the keys in the cache.
-
-* `values()`
-
- Return an array of the values in the cache.
-
-* `length()`
-
- Return total length of objects in cache taking into account
- `length` options function.
-
-* `itemCount`
-
- Return total quantity of objects currently in cache. Note, that
- `stale` (see options) items are returned as part of this item
- count.
-
-* `dump()`
-
- Return an array of the cache entries ready for serialization and usage
- with 'destinationCache.load(arr)`.
-
-* `load(cacheEntriesArray)`
-
- Loads another cache entries array, obtained with `sourceCache.dump()`,
- into the cache. The destination cache is reset before loading new entries
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/lib/lru-cache.js b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/lib/lru-cache.js
deleted file mode 100644
index 2bbe653be8ad08..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/lib/lru-cache.js
+++ /dev/null
@@ -1,334 +0,0 @@
-;(function () { // closure for web browsers
-
-if (typeof module === 'object' && module.exports) {
- module.exports = LRUCache
-} else {
- // just set the global for non-node platforms.
- this.LRUCache = LRUCache
-}
-
-function hOP (obj, key) {
- return Object.prototype.hasOwnProperty.call(obj, key)
-}
-
-function naiveLength () { return 1 }
-
-var didTypeWarning = false
-function typeCheckKey(key) {
- if (!didTypeWarning && typeof key !== 'string' && typeof key !== 'number') {
- didTypeWarning = true
- console.error(new TypeError("LRU: key must be a string or number. Almost certainly a bug! " + typeof key).stack)
- }
-}
-
-function LRUCache (options) {
- if (!(this instanceof LRUCache))
- return new LRUCache(options)
-
- if (typeof options === 'number')
- options = { max: options }
-
- if (!options)
- options = {}
-
- this._max = options.max
- // Kind of weird to have a default max of Infinity, but oh well.
- if (!this._max || !(typeof this._max === "number") || this._max <= 0 )
- this._max = Infinity
-
- this._lengthCalculator = options.length || naiveLength
- if (typeof this._lengthCalculator !== "function")
- this._lengthCalculator = naiveLength
-
- this._allowStale = options.stale || false
- this._maxAge = options.maxAge || null
- this._dispose = options.dispose
- this.reset()
-}
-
-// resize the cache when the max changes.
-Object.defineProperty(LRUCache.prototype, "max",
- { set : function (mL) {
- if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity
- this._max = mL
- if (this._length > this._max) trim(this)
- }
- , get : function () { return this._max }
- , enumerable : true
- })
-
-// resize the cache when the lengthCalculator changes.
-Object.defineProperty(LRUCache.prototype, "lengthCalculator",
- { set : function (lC) {
- if (typeof lC !== "function") {
- this._lengthCalculator = naiveLength
- this._length = this._itemCount
- for (var key in this._cache) {
- this._cache[key].length = 1
- }
- } else {
- this._lengthCalculator = lC
- this._length = 0
- for (var key in this._cache) {
- this._cache[key].length = this._lengthCalculator(this._cache[key].value)
- this._length += this._cache[key].length
- }
- }
-
- if (this._length > this._max) trim(this)
- }
- , get : function () { return this._lengthCalculator }
- , enumerable : true
- })
-
-Object.defineProperty(LRUCache.prototype, "length",
- { get : function () { return this._length }
- , enumerable : true
- })
-
-
-Object.defineProperty(LRUCache.prototype, "itemCount",
- { get : function () { return this._itemCount }
- , enumerable : true
- })
-
-LRUCache.prototype.forEach = function (fn, thisp) {
- thisp = thisp || this
- var i = 0
- var itemCount = this._itemCount
-
- for (var k = this._mru - 1; k >= 0 && i < itemCount; k--) if (this._lruList[k]) {
- i++
- var hit = this._lruList[k]
- if (isStale(this, hit)) {
- del(this, hit)
- if (!this._allowStale) hit = undefined
- }
- if (hit) {
- fn.call(thisp, hit.value, hit.key, this)
- }
- }
-}
-
-LRUCache.prototype.keys = function () {
- var keys = new Array(this._itemCount)
- var i = 0
- for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) {
- var hit = this._lruList[k]
- keys[i++] = hit.key
- }
- return keys
-}
-
-LRUCache.prototype.values = function () {
- var values = new Array(this._itemCount)
- var i = 0
- for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) {
- var hit = this._lruList[k]
- values[i++] = hit.value
- }
- return values
-}
-
-LRUCache.prototype.reset = function () {
- if (this._dispose && this._cache) {
- for (var k in this._cache) {
- this._dispose(k, this._cache[k].value)
- }
- }
-
- this._cache = Object.create(null) // hash of items by key
- this._lruList = Object.create(null) // list of items in order of use recency
- this._mru = 0 // most recently used
- this._lru = 0 // least recently used
- this._length = 0 // number of items in the list
- this._itemCount = 0
-}
-
-LRUCache.prototype.dump = function () {
- var arr = []
- var i = 0
-
- for (var k = this._mru - 1; k >= 0 && i < this._itemCount; k--) if (this._lruList[k]) {
- var hit = this._lruList[k]
- if (!isStale(this, hit)) {
- //Do not store staled hits
- ++i
- arr.push({
- k: hit.key,
- v: hit.value,
- e: hit.now + (hit.maxAge || 0)
- });
- }
- }
- //arr has the most read first
- return arr
-}
-
-LRUCache.prototype.dumpLru = function () {
- return this._lruList
-}
-
-LRUCache.prototype.set = function (key, value, maxAge) {
- maxAge = maxAge || this._maxAge
- typeCheckKey(key)
-
- var now = maxAge ? Date.now() : 0
- var len = this._lengthCalculator(value)
-
- if (hOP(this._cache, key)) {
- if (len > this._max) {
- del(this, this._cache[key])
- return false
- }
- // dispose of the old one before overwriting
- if (this._dispose)
- this._dispose(key, this._cache[key].value)
-
- this._cache[key].now = now
- this._cache[key].maxAge = maxAge
- this._cache[key].value = value
- this._length += (len - this._cache[key].length)
- this._cache[key].length = len
- this.get(key)
-
- if (this._length > this._max)
- trim(this)
-
- return true
- }
-
- var hit = new Entry(key, value, this._mru++, len, now, maxAge)
-
- // oversized objects fall out of cache automatically.
- if (hit.length > this._max) {
- if (this._dispose) this._dispose(key, value)
- return false
- }
-
- this._length += hit.length
- this._lruList[hit.lu] = this._cache[key] = hit
- this._itemCount ++
-
- if (this._length > this._max)
- trim(this)
-
- return true
-}
-
-LRUCache.prototype.has = function (key) {
- typeCheckKey(key)
- if (!hOP(this._cache, key)) return false
- var hit = this._cache[key]
- if (isStale(this, hit)) {
- return false
- }
- return true
-}
-
-LRUCache.prototype.get = function (key) {
- typeCheckKey(key)
- return get(this, key, true)
-}
-
-LRUCache.prototype.peek = function (key) {
- typeCheckKey(key)
- return get(this, key, false)
-}
-
-LRUCache.prototype.pop = function () {
- var hit = this._lruList[this._lru]
- del(this, hit)
- return hit || null
-}
-
-LRUCache.prototype.del = function (key) {
- typeCheckKey(key)
- del(this, this._cache[key])
-}
-
-LRUCache.prototype.load = function (arr) {
- //reset the cache
- this.reset();
-
- var now = Date.now()
- //A previous serialized cache has the most recent items first
- for (var l = arr.length - 1; l >= 0; l-- ) {
- var hit = arr[l]
- typeCheckKey(hit.k)
- var expiresAt = hit.e || 0
- if (expiresAt === 0) {
- //the item was created without expiration in a non aged cache
- this.set(hit.k, hit.v)
- } else {
- var maxAge = expiresAt - now
- //dont add already expired items
- if (maxAge > 0) this.set(hit.k, hit.v, maxAge)
- }
- }
-}
-
-function get (self, key, doUse) {
- typeCheckKey(key)
- var hit = self._cache[key]
- if (hit) {
- if (isStale(self, hit)) {
- del(self, hit)
- if (!self._allowStale) hit = undefined
- } else {
- if (doUse) use(self, hit)
- }
- if (hit) hit = hit.value
- }
- return hit
-}
-
-function isStale(self, hit) {
- if (!hit || (!hit.maxAge && !self._maxAge)) return false
- var stale = false;
- var diff = Date.now() - hit.now
- if (hit.maxAge) {
- stale = diff > hit.maxAge
- } else {
- stale = self._maxAge && (diff > self._maxAge)
- }
- return stale;
-}
-
-function use (self, hit) {
- shiftLU(self, hit)
- hit.lu = self._mru ++
- self._lruList[hit.lu] = hit
-}
-
-function trim (self) {
- while (self._lru < self._mru && self._length > self._max)
- del(self, self._lruList[self._lru])
-}
-
-function shiftLU (self, hit) {
- delete self._lruList[ hit.lu ]
- while (self._lru < self._mru && !self._lruList[self._lru]) self._lru ++
-}
-
-function del (self, hit) {
- if (hit) {
- if (self._dispose) self._dispose(hit.key, hit.value)
- self._length -= hit.length
- self._itemCount --
- delete self._cache[ hit.key ]
- shiftLU(self, hit)
- }
-}
-
-// classy, since V8 prefers predictable objects.
-function Entry (key, value, lu, length, now, maxAge) {
- this.key = key
- this.value = value
- this.lu = lu
- this.length = length
- this.now = now
- if (maxAge) this.maxAge = maxAge
-}
-
-})()
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/package.json b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/package.json
deleted file mode 100644
index 576b8dd894b5b2..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
- "name": "lru-cache",
- "description": "A cache object that deletes the least-recently-used items.",
- "version": "2.7.3",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me"
- },
- "keywords": [
- "mru",
- "lru",
- "cache"
- ],
- "scripts": {
- "test": "tap test --gc"
- },
- "main": "lib/lru-cache.js",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-lru-cache.git"
- },
- "devDependencies": {
- "tap": "^1.2.0",
- "weak": ""
- },
- "license": "ISC",
- "gitHead": "292048199f6d28b77fbe584279a1898e25e4c714",
- "bugs": {
- "url": "https://github.com/isaacs/node-lru-cache/issues"
- },
- "homepage": "https://github.com/isaacs/node-lru-cache#readme",
- "_id": "lru-cache@2.7.3",
- "_shasum": "6d4524e8b955f95d4f5b58851ce21dd72fb4e952",
- "_from": "lru-cache@>=2.6.5 <3.0.0",
- "_npmVersion": "3.3.2",
- "_nodeVersion": "4.0.0",
- "_npmUser": {
- "name": "isaacs",
- "email": "i@izs.me"
- },
- "dist": {
- "shasum": "6d4524e8b955f95d4f5b58851ce21dd72fb4e952",
- "tarball": "http://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz"
- },
- "maintainers": [
- {
- "name": "isaacs",
- "email": "isaacs@npmjs.com"
- },
- {
- "name": "othiym23",
- "email": "ogd@aoaioxxysz.net"
- }
- ],
- "directories": {},
- "_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz",
- "readme": "ERROR: No README data found!"
-}
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/basic.js b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/basic.js
deleted file mode 100644
index b47225f109891f..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/basic.js
+++ /dev/null
@@ -1,396 +0,0 @@
-var test = require("tap").test
- , LRU = require("../")
-
-test("basic", function (t) {
- var cache = new LRU({max: 10})
- cache.set("key", "value")
- t.equal(cache.get("key"), "value")
- t.equal(cache.get("nada"), undefined)
- t.equal(cache.length, 1)
- t.equal(cache.max, 10)
- t.end()
-})
-
-test("least recently set", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("lru recently gotten", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.get("a")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("a"), "A")
- t.end()
-})
-
-test("del", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.del("a")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("max", function (t) {
- var cache = new LRU(3)
-
- // test changing the max, verify that the LRU items get dropped.
- cache.max = 100
- for (var i = 0; i < 100; i ++) cache.set(i, i)
- t.equal(cache.length, 100)
- for (var i = 0; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- cache.max = 3
- t.equal(cache.length, 3)
- for (var i = 0; i < 97; i ++) {
- t.equal(cache.get(i), undefined)
- }
- for (var i = 98; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
-
- // now remove the max restriction, and try again.
- cache.max = "hello"
- for (var i = 0; i < 100; i ++) cache.set(i, i)
- t.equal(cache.length, 100)
- for (var i = 0; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- // should trigger an immediate resize
- cache.max = 3
- t.equal(cache.length, 3)
- for (var i = 0; i < 97; i ++) {
- t.equal(cache.get(i), undefined)
- }
- for (var i = 98; i < 100; i ++) {
- t.equal(cache.get(i), i)
- }
- t.end()
-})
-
-test("reset", function (t) {
- var cache = new LRU(10)
- cache.set("a", "A")
- cache.set("b", "B")
- cache.reset()
- t.equal(cache.length, 0)
- t.equal(cache.max, 10)
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.end()
-})
-
-
-test("basic with weighed length", function (t) {
- var cache = new LRU({
- max: 100,
- length: function (item) { return item.size }
- })
- cache.set("key", {val: "value", size: 50})
- t.equal(cache.get("key").val, "value")
- t.equal(cache.get("nada"), undefined)
- t.equal(cache.lengthCalculator(cache.get("key")), 50)
- t.equal(cache.length, 50)
- t.equal(cache.max, 100)
- t.end()
-})
-
-
-test("weighed length item too large", function (t) {
- var cache = new LRU({
- max: 10,
- length: function (item) { return item.size }
- })
- t.equal(cache.max, 10)
-
- // should fall out immediately
- cache.set("key", {val: "value", size: 50})
-
- t.equal(cache.length, 0)
- t.equal(cache.get("key"), undefined)
- t.end()
-})
-
-test("least recently set with weighed length", function (t) {
- var cache = new LRU({
- max:8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- cache.set("d", "DDDD")
- t.equal(cache.get("d"), "DDDD")
- t.equal(cache.get("c"), "CCC")
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("lru recently gotten with weighed length", function (t) {
- var cache = new LRU({
- max: 8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- cache.get("a")
- cache.get("b")
- cache.set("d", "DDDD")
- t.equal(cache.get("c"), undefined)
- t.equal(cache.get("d"), "DDDD")
- t.equal(cache.get("b"), "BB")
- t.equal(cache.get("a"), "A")
- t.end()
-})
-
-test("lru recently updated with weighed length", function (t) {
- var cache = new LRU({
- max: 8,
- length: function (item) { return item.length }
- })
- cache.set("a", "A")
- cache.set("b", "BB")
- cache.set("c", "CCC")
- t.equal(cache.length, 6) //CCC BB A
- cache.set("a", "+A")
- t.equal(cache.length, 7) //+A CCC BB
- cache.set("b", "++BB")
- t.equal(cache.length, 6) //++BB +A
- t.equal(cache.get("c"), undefined)
-
- cache.set("c", "oversized")
- t.equal(cache.length, 6) //++BB +A
- t.equal(cache.get("c"), undefined)
-
- cache.set("a", "oversized")
- t.equal(cache.length, 4) //++BB
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), "++BB")
- t.end()
-})
-
-test("set returns proper booleans", function(t) {
- var cache = new LRU({
- max: 5,
- length: function (item) { return item.length }
- })
-
- t.equal(cache.set("a", "A"), true)
-
- // should return false for max exceeded
- t.equal(cache.set("b", "donuts"), false)
-
- t.equal(cache.set("b", "B"), true)
- t.equal(cache.set("c", "CCCC"), true)
- t.end()
-})
-
-test("drop the old items", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- cache.set("a", "A")
-
- setTimeout(function () {
- cache.set("b", "b")
- t.equal(cache.get("a"), "A")
- }, 25)
-
- setTimeout(function () {
- cache.set("c", "C")
- // timed out
- t.notOk(cache.get("a"))
- }, 60 + 25)
-
- setTimeout(function () {
- t.notOk(cache.get("b"))
- t.equal(cache.get("c"), "C")
- }, 90)
-
- setTimeout(function () {
- t.notOk(cache.get("c"))
- t.end()
- }, 155)
-})
-
-test("individual item can have it's own maxAge", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- cache.set("a", "A", 20)
- setTimeout(function () {
- t.notOk(cache.get("a"))
- t.end()
- }, 25)
-})
-
-test("individual item can have it's own maxAge > cache's", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 20
- })
-
- cache.set("a", "A", 50)
- setTimeout(function () {
- t.equal(cache.get("a"), "A")
- t.end()
- }, 25)
-})
-
-test("disposal function", function(t) {
- var disposed = false
- var cache = new LRU({
- max: 1,
- dispose: function (k, n) {
- disposed = n
- }
- })
-
- cache.set(1, 1)
- cache.set(2, 2)
- t.equal(disposed, 1)
- cache.set(3, 3)
- t.equal(disposed, 2)
- cache.reset()
- t.equal(disposed, 3)
- t.end()
-})
-
-test("disposal function on too big of item", function(t) {
- var disposed = false
- var cache = new LRU({
- max: 1,
- length: function (k) {
- return k.length
- },
- dispose: function (k, n) {
- disposed = n
- }
- })
- var obj = [ 1, 2 ]
-
- t.equal(disposed, false)
- cache.set("obj", obj)
- t.equal(disposed, obj)
- t.end()
-})
-
-test("has()", function(t) {
- var cache = new LRU({
- max: 1,
- maxAge: 10
- })
-
- cache.set('foo', 'bar')
- t.equal(cache.has('foo'), true)
- cache.set('blu', 'baz')
- t.equal(cache.has('foo'), false)
- t.equal(cache.has('blu'), true)
- setTimeout(function() {
- t.equal(cache.has('blu'), false)
- t.end()
- }, 15)
-})
-
-test("stale", function(t) {
- var cache = new LRU({
- maxAge: 10,
- stale: true
- })
-
- cache.set('foo', 'bar')
- t.equal(cache.get('foo'), 'bar')
- t.equal(cache.has('foo'), true)
- setTimeout(function() {
- t.equal(cache.has('foo'), false)
- t.equal(cache.get('foo'), 'bar')
- t.equal(cache.get('foo'), undefined)
- t.end()
- }, 15)
-})
-
-test("lru update via set", function(t) {
- var cache = LRU({ max: 2 });
-
- cache.set('foo', 1);
- cache.set('bar', 2);
- cache.del('bar');
- cache.set('baz', 3);
- cache.set('qux', 4);
-
- t.equal(cache.get('foo'), undefined)
- t.equal(cache.get('bar'), undefined)
- t.equal(cache.get('baz'), 3)
- t.equal(cache.get('qux'), 4)
- t.end()
-})
-
-test("least recently set w/ peek", function (t) {
- var cache = new LRU(2)
- cache.set("a", "A")
- cache.set("b", "B")
- t.equal(cache.peek("a"), "A")
- cache.set("c", "C")
- t.equal(cache.get("c"), "C")
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("a"), undefined)
- t.end()
-})
-
-test("pop the least used item", function (t) {
- var cache = new LRU(3)
- , last
-
- cache.set("a", "A")
- cache.set("b", "B")
- cache.set("c", "C")
-
- t.equal(cache.length, 3)
- t.equal(cache.max, 3)
-
- // Ensure we pop a, c, b
- cache.get("b", "B")
-
- last = cache.pop()
- t.equal(last.key, "a")
- t.equal(last.value, "A")
- t.equal(cache.length, 2)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last.key, "c")
- t.equal(last.value, "C")
- t.equal(cache.length, 1)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last.key, "b")
- t.equal(last.value, "B")
- t.equal(cache.length, 0)
- t.equal(cache.max, 3)
-
- last = cache.pop()
- t.equal(last, null)
- t.equal(cache.length, 0)
- t.equal(cache.max, 3)
-
- t.end()
-})
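
The weighted-length behaviour exercised by the deleted `basic.js` tests can be summarised with a short sketch, again assuming `lru-cache@2`:

```js
// Weighted-length sketch matching the deleted tests (assumes lru-cache@2).
var LRU = require('lru-cache')

var cache = LRU({
  max: 8,                                         // total weight budget
  length: function (item) { return item.length }  // each string "costs" its length
})

cache.set('a', 'A')        // weight 1
cache.set('b', 'BB')       // weight 2
cache.set('c', 'CCC')      // weight 3 -> total 6
cache.set('d', 'DDDD')     // weight 4 -> least recently used entries are evicted
console.log(cache.get('a'))                // undefined (evicted to stay under max)
console.log(cache.set('x', 'oversized!'))  // false: a single entry larger than max is rejected
```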
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/foreach.js b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/foreach.js
deleted file mode 100644
index 4190417cbc61d8..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/foreach.js
+++ /dev/null
@@ -1,120 +0,0 @@
-var test = require('tap').test
-var LRU = require('../')
-
-test('forEach', function (t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- var i = 9
- l.forEach(function (val, key, cache) {
- t.equal(cache, l)
- t.equal(key, i.toString())
- t.equal(val, i.toString(2))
- i -= 1
- })
-
- // get in order of most recently used
- l.get(6)
- l.get(8)
-
- var order = [ 8, 6, 9, 7, 5 ]
- var i = 0
-
- l.forEach(function (val, key, cache) {
- var j = order[i ++]
- t.equal(cache, l)
- t.equal(key, j.toString())
- t.equal(val, j.toString(2))
- })
- t.equal(i, order.length);
-
- t.end()
-})
-
-test('keys() and values()', function (t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- t.similar(l.keys(), ['9', '8', '7', '6', '5'])
- t.similar(l.values(), ['1001', '1000', '111', '110', '101'])
-
- // get in order of most recently used
- l.get(6)
- l.get(8)
-
- t.similar(l.keys(), ['8', '6', '9', '7', '5'])
- t.similar(l.values(), ['1000', '110', '1001', '111', '101'])
-
- t.end()
-})
-
-test('all entries are iterated over', function(t) {
- var l = new LRU(5)
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- var i = 0
- l.forEach(function (val, key, cache) {
- if (i > 0) {
- cache.del(key)
- }
- i += 1
- })
-
- t.equal(i, 5)
- t.equal(l.keys().length, 1)
-
- t.end()
-})
-
-test('all stale entries are removed', function(t) {
- var l = new LRU({ max: 5, maxAge: -5, stale: true })
- for (var i = 0; i < 10; i ++) {
- l.set(i.toString(), i.toString(2))
- }
-
- var i = 0
- l.forEach(function () {
- i += 1
- })
-
- t.equal(i, 5)
- t.equal(l.keys().length, 0)
-
- t.end()
-})
-
-test('expires', function (t) {
- var l = new LRU({
- max: 10,
- maxAge: 50
- })
- for (var i = 0; i < 10; i++) {
- l.set(i.toString(), i.toString(2), ((i % 2) ? 25 : undefined))
- }
-
- var i = 0
- var order = [ 8, 6, 4, 2, 0 ]
- setTimeout(function () {
- l.forEach(function (val, key, cache) {
- var j = order[i++]
- t.equal(cache, l)
- t.equal(key, j.toString())
- t.equal(val, j.toString(2))
- })
- t.equal(i, order.length);
-
- setTimeout(function () {
- var count = 0;
- l.forEach(function (val, key, cache) { count++; })
- t.equal(0, count);
- t.end()
- }, 25)
-
- }, 26)
-})
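
The iteration helpers covered by the deleted `foreach.js` tests walk entries from most to least recently used; a small sketch, assuming `lru-cache@2`:

```js
// Iteration sketch matching the deleted foreach.js tests (assumes lru-cache@2).
var LRU = require('lru-cache')

var cache = LRU(3)
cache.set('a', 1)
cache.set('b', 2)
cache.set('c', 3)
cache.get('a')             // touch "a" so it becomes most recently used

// forEach visits entries from most to least recently used.
cache.forEach(function (value, key) {
  console.log(key, value)  // a 1, c 3, b 2
})

console.log(cache.keys())   // [ 'a', 'c', 'b' ]
console.log(cache.values()) // [ 1, 3, 2 ]
```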
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/memory-leak.js b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/memory-leak.js
deleted file mode 100644
index b5912f6f168e5e..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/memory-leak.js
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env node --expose_gc
-
-
-var weak = require('weak');
-var test = require('tap').test
-var LRU = require('../')
-var l = new LRU({ max: 10 })
-var refs = 0
-function X() {
- refs ++
- weak(this, deref)
-}
-
-function deref() {
- refs --
-}
-
-test('no leaks', function (t) {
- // fill up the cache
- for (var i = 0; i < 100; i++) {
- l.set(i, new X);
- // throw some gets in there, too.
- if (i % 2 === 0)
- l.get(i / 2)
- }
-
- gc()
-
- var start = process.memoryUsage()
-
- // capture the memory
- var startRefs = refs
-
- // do it again, but more
- for (var i = 0; i < 10000; i++) {
- l.set(i, new X);
- // throw some gets in there, too.
- if (i % 2 === 0)
- l.get(i / 2)
- }
-
- gc()
-
- var end = process.memoryUsage()
- t.equal(refs, startRefs, 'no leaky refs')
-
- console.error('start: %j\n' +
- 'end: %j', start, end);
- t.pass();
- t.end();
-})
diff --git a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/serialize.js b/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/serialize.js
deleted file mode 100644
index 5fe5dc3d371f1e..00000000000000
--- a/deps/npm/node_modules/request/node_modules/aws4/node_modules/lru-cache/test/serialize.js
+++ /dev/null
@@ -1,215 +0,0 @@
-var test = require('tap').test
-var LRU = require('../')
-
-test('dump', function (t) {
- var cache = new LRU()
-
- t.equal(cache.dump().length, 0, "nothing in dump for empty cache")
-
- cache.set("a", "A")
- cache.set("b", "B")
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 },
- { k: "a", v: "A", e: 0 }
- ])
-
- cache.set("a", "A");
- t.deepEqual(cache.dump(), [
- { k: "a", v: "A", e: 0 },
- { k: "b", v: "B", e: 0 }
- ])
-
- cache.get("b");
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 },
- { k: "a", v: "A", e: 0 }
- ])
-
- cache.del("a");
- t.deepEqual(cache.dump(), [
- { k: "b", v: "B", e: 0 }
- ])
-
- t.end()
-})
-
-test("do not dump stale items", function(t) {
- var cache = new LRU({
- max: 5,
- maxAge: 50
- })
-
- //expires at 50
- cache.set("a", "A")
-
- setTimeout(function () {
- //expires at 75
- cache.set("b", "B")
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "b")
- t.equal(s[1].k, "a")
- }, 25)
-
- setTimeout(function () {
- //expires at 110
- cache.set("c", "C")
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "c")
- t.equal(s[1].k, "b")
- }, 60)
-
- setTimeout(function () {
- //expires at 130
- cache.set("d", "D", 40)
- var s = cache.dump()
- t.equal(s.length, 2)
- t.equal(s[0].k, "d")
- t.equal(s[1].k, "c")
- }, 90)
-
- setTimeout(function () {
- var s = cache.dump()
- t.equal(s.length, 1)
- t.equal(s[0].k, "d")
- }, 120)
-
- setTimeout(function () {
- var s = cache.dump()
- t.deepEqual(s, [])
- t.end()
- }, 155)
-})
-
-test("load basic cache", function(t) {
- var cache = new LRU(),
- copy = new LRU()
-
- cache.set("a", "A")
- cache.set("b", "B")
-
- copy.load(cache.dump())
- t.deepEquals(cache.dump(), copy.dump())
-
- t.end()
-})
-
-
-test("load staled cache", function(t) {
- var cache = new LRU({maxAge: 50}),
- copy = new LRU({maxAge: 50}),
- arr
-
- //expires at 50
- cache.set("a", "A")
- setTimeout(function () {
- //expires at 80
- cache.set("b", "B")
- arr = cache.dump()
- t.equal(arr.length, 2)
- }, 30)
-
- setTimeout(function () {
- copy.load(arr)
- t.equal(copy.get("a"), undefined)
- t.equal(copy.get("b"), "B")
- }, 60)
-
- setTimeout(function () {
- t.equal(copy.get("b"), undefined)
- t.end()
- }, 90)
-})
-
-test("load to other size cache", function(t) {
- var cache = new LRU({max: 2}),
- copy = new LRU({max: 1})
-
- cache.set("a", "A")
- cache.set("b", "B")
-
- copy.load(cache.dump())
- t.equal(copy.get("a"), undefined)
- t.equal(copy.get("b"), "B")
-
- //update the last read from original cache
- cache.get("a")
- copy.load(cache.dump())
- t.equal(copy.get("a"), "A")
- t.equal(copy.get("b"), undefined)
-
- t.end()
-})
-
-
-test("load to other age cache", function(t) {
- var cache = new LRU({maxAge: 50}),
- aged = new LRU({maxAge: 100}),
- simple = new LRU(),
- arr,
- expired
-
- //created at 0
- //a would be valid till 0 + 50
- cache.set("a", "A")
- setTimeout(function () {
- //created at 20
- //b would be valid till 20 + 50
- cache.set("b", "B")
- //b would be valid till 20 + 70
- cache.set("c", "C", 70)
- arr = cache.dump()
- t.equal(arr.length, 3)
- }, 20)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), "B")
- t.equal(cache.get("c"), "C")
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), "B")
- t.equal(aged.get("c"), "C")
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), "B")
- t.equal(simple.get("c"), "C")
- }, 60)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("c"), "C")
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), undefined)
- t.equal(aged.get("c"), "C")
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), undefined)
- t.equal(simple.get("c"), "C")
- }, 80)
-
- setTimeout(function () {
- t.equal(cache.get("a"), undefined)
- t.equal(cache.get("b"), undefined)
- t.equal(cache.get("c"), undefined)
-
- aged.load(arr)
- t.equal(aged.get("a"), undefined)
- t.equal(aged.get("b"), undefined)
- t.equal(aged.get("c"), undefined)
-
- simple.load(arr)
- t.equal(simple.get("a"), undefined)
- t.equal(simple.get("b"), undefined)
- t.equal(simple.get("c"), undefined)
- t.end()
- }, 100)
-
-})
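
The serialization round-trip those deleted tests cover uses `dump()` and `load()`: `dump()` emits `{ k, v, e }` records (most recent first, `e` being the absolute expiration time or 0), and `load()` resets the target cache and re-inserts only the entries that have not yet expired. A sketch assuming `lru-cache@2`:

```js
// Serialization sketch matching the deleted serialize.js tests (assumes lru-cache@2).
var LRU = require('lru-cache')

var original = LRU({ maxAge: 60 * 1000 })
original.set('a', 'A')
original.set('b', 'B')

// dump() returns entries most-recent-first as { k, v, e } records,
// where "e" is the absolute expiration time (0 when there is none).
var snapshot = original.dump()

// load() resets the target cache and re-inserts the still-valid entries,
// converting each absolute expiration back into a relative maxAge.
var copy = LRU({ maxAge: 60 * 1000 })
copy.load(snapshot)
console.log(copy.get('b')) // 'B'
```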
diff --git a/deps/npm/node_modules/request/node_modules/aws4/package.json b/deps/npm/node_modules/request/node_modules/aws4/package.json
index 38f9c00ca57ed2..d0c94cc307d32f 100644
--- a/deps/npm/node_modules/request/node_modules/aws4/package.json
+++ b/deps/npm/node_modules/request/node_modules/aws4/package.json
@@ -1,6 +1,6 @@
{
"name": "aws4",
- "version": "1.2.1",
+ "version": "1.3.2",
"description": "Signs and prepares requests using AWS Signature Version 4",
"author": {
"name": "Michael Hart",
@@ -66,25 +66,25 @@
},
"license": "MIT",
"dependencies": {
- "lru-cache": "^2.6.5"
+ "lru-cache": "^4.0.0"
},
"devDependencies": {
- "mocha": "^2.2.5",
- "should": "^7.0.1"
+ "mocha": "^2.4.5",
+ "should": "^8.2.2"
},
"scripts": {
"test": "mocha ./test/fast.js ./test/slow.js -b -t 100s -R list"
},
- "gitHead": "3d8a3a06a8415bd5255b4f60eb91576952e97f5c",
+ "gitHead": "e899db3aacbf2a034398f7dd483345dc793e8d72",
"bugs": {
"url": "https://github.com/mhart/aws4/issues"
},
"homepage": "https://github.com/mhart/aws4#readme",
- "_id": "aws4@1.2.1",
- "_shasum": "52b5659a4d32583d405f65e1124ac436d07fe5ac",
+ "_id": "aws4@1.3.2",
+ "_shasum": "d39e0bee412ced0e8ed94a23e314f313a95b9fd1",
"_from": "aws4@>=1.2.1 <2.0.0",
- "_npmVersion": "2.14.15",
- "_nodeVersion": "4.2.4",
+ "_npmVersion": "2.14.21",
+ "_nodeVersion": "4.3.1",
"_npmUser": {
"name": "hichaelmart",
"email": "michael.hart.au@gmail.com"
@@ -96,9 +96,14 @@
}
],
"dist": {
- "shasum": "52b5659a4d32583d405f65e1124ac436d07fe5ac",
- "tarball": "http://registry.npmjs.org/aws4/-/aws4-1.2.1.tgz"
+ "shasum": "d39e0bee412ced0e8ed94a23e314f313a95b9fd1",
+ "tarball": "http://registry.npmjs.org/aws4/-/aws4-1.3.2.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/aws4-1.3.2.tgz_1456871543899_0.13955276948399842"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.2.1.tgz"
+ "_resolved": "https://registry.npmjs.org/aws4/-/aws4-1.3.2.tgz",
+ "readme": "ERROR: No README data found!"
}
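
This bumps aws4's lru-cache dependency from `^2.6.5` to `^4.0.0`. To the best of my knowledge the basic surface aws4 relies on (construct a cache, `set`, `get`, `has`) is unchanged across that jump; a hedged sketch:

```js
// Hedged sketch: the basic lru-cache calls behave the same way under ^4.0.0.
var LRU = require('lru-cache')

var credsCache = LRU({ max: 50 })          // options object (or a plain max), as in 2.x
credsCache.set('key', { token: 'abc' })
console.log(credsCache.get('key'))         // { token: 'abc' }
console.log(credsCache.has('key'))         // true
```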
diff --git a/deps/npm/node_modules/request/node_modules/bl/README.md b/deps/npm/node_modules/request/node_modules/bl/README.md
index 4d87866aa19525..f7044db26e8659 100644
--- a/deps/npm/node_modules/request/node_modules/bl/README.md
+++ b/deps/npm/node_modules/request/node_modules/bl/README.md
@@ -96,7 +96,7 @@ bl.pipe(fs.createWriteStream('gibberish.txt'))
--------------------------------------------------------
-### new BufferList([ callback | buffer | buffer array ])
+### new BufferList([ callback | Buffer | Buffer array | BufferList | BufferList array | String ])
The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream.
Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object.
@@ -120,8 +120,8 @@ Get the length of the list in bytes. This is the sum of the lengths of all of th
--------------------------------------------------------
-### bl.append(buffer)
-`append(buffer)` adds an additional buffer or BufferList to the internal list.
+### bl.append(Buffer | Buffer array | BufferList | BufferList array | String)
+`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained.
--------------------------------------------------------
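
A short sketch of the widened `append()` contract described in the README change above (Buffer, Buffer array, BufferList, BufferList array, or String, with `this` returned for chaining), assuming `bl` at the patched version:

```js
// Sketch of the widened append() contract (assumes bl at the patched version).
var BufferList = require('bl')

var bl = new BufferList()

// append() accepts Buffers, arrays of Buffers, other BufferLists and strings,
// and returns `this`, so calls can be chained.
bl.append(new Buffer('abc'))
  .append([ new Buffer('d'), new Buffer('ef') ])
  .append('ghi')

console.log(bl.length)            // 9
console.log(bl.toString('utf8'))  // 'abcdefghi'
```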
diff --git a/deps/npm/node_modules/request/node_modules/bl/bl.js b/deps/npm/node_modules/request/node_modules/bl/bl.js
index b979ba82e434d2..f585df1721798f 100644
--- a/deps/npm/node_modules/request/node_modules/bl/bl.js
+++ b/deps/npm/node_modules/request/node_modules/bl/bl.js
@@ -1,6 +1,7 @@
var DuplexStream = require('readable-stream/duplex')
, util = require('util')
+
function BufferList (callback) {
if (!(this instanceof BufferList))
return new BufferList(callback)
@@ -11,34 +12,31 @@ function BufferList (callback) {
if (typeof callback == 'function') {
this._callback = callback
- var piper = function (err) {
+ var piper = function piper (err) {
if (this._callback) {
this._callback(err)
this._callback = null
}
}.bind(this)
- this.on('pipe', function (src) {
+ this.on('pipe', function onPipe (src) {
src.on('error', piper)
})
- this.on('unpipe', function (src) {
+ this.on('unpipe', function onUnpipe (src) {
src.removeListener('error', piper)
})
- }
- else if (Buffer.isBuffer(callback))
+ } else {
this.append(callback)
- else if (Array.isArray(callback)) {
- callback.forEach(function (b) {
- Buffer.isBuffer(b) && this.append(b)
- }.bind(this))
}
DuplexStream.call(this)
}
+
util.inherits(BufferList, DuplexStream)
-BufferList.prototype._offset = function (offset) {
+
+BufferList.prototype._offset = function _offset (offset) {
var tot = 0, i = 0, _t
for (; i < this._bufs.length; i++) {
_t = tot + this._bufs[i].length
@@ -48,35 +46,52 @@ BufferList.prototype._offset = function (offset) {
}
}
-BufferList.prototype.append = function (buf) {
- var isBuffer = Buffer.isBuffer(buf) ||
- buf instanceof BufferList
- // coerce number arguments to strings, since Buffer(number) does
- // uninitialized memory allocation
- if (typeof buf == 'number')
- buf = buf.toString()
+BufferList.prototype.append = function append (buf) {
+ var i = 0
+ , newBuf
+
+ if (Array.isArray(buf)) {
+ for (; i < buf.length; i++)
+ this.append(buf[i])
+ } else if (buf instanceof BufferList) {
+ // unwrap argument into individual BufferLists
+ for (; i < buf._bufs.length; i++)
+ this.append(buf._bufs[i])
+ } else if (buf != null) {
+ // coerce number arguments to strings, since Buffer(number) does
+ // uninitialized memory allocation
+ if (typeof buf == 'number')
+ buf = buf.toString()
+
+ newBuf = Buffer.isBuffer(buf) ? buf : new Buffer(buf)
+ this._bufs.push(newBuf)
+ this.length += newBuf.length
+ }
- this._bufs.push(isBuffer ? buf : new Buffer(buf))
- this.length += buf.length
return this
}
-BufferList.prototype._write = function (buf, encoding, callback) {
+
+BufferList.prototype._write = function _write (buf, encoding, callback) {
this.append(buf)
- if (callback)
+
+ if (typeof callback == 'function')
callback()
}
-BufferList.prototype._read = function (size) {
+
+BufferList.prototype._read = function _read (size) {
if (!this.length)
return this.push(null)
+
size = Math.min(size, this.length)
this.push(this.slice(0, size))
this.consume(size)
}
-BufferList.prototype.end = function (chunk) {
+
+BufferList.prototype.end = function end (chunk) {
DuplexStream.prototype.end.call(this, chunk)
if (this._callback) {
@@ -85,15 +100,18 @@ BufferList.prototype.end = function (chunk) {
}
}
-BufferList.prototype.get = function (index) {
+
+BufferList.prototype.get = function get (index) {
return this.slice(index, index + 1)[0]
}
-BufferList.prototype.slice = function (start, end) {
+
+BufferList.prototype.slice = function slice (start, end) {
return this.copy(null, 0, start, end)
}
-BufferList.prototype.copy = function (dst, dstStart, srcStart, srcEnd) {
+
+BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
if (typeof srcStart != 'number' || srcStart < 0)
srcStart = 0
if (typeof srcEnd != 'number' || srcEnd > this.length)
@@ -156,13 +174,13 @@ BufferList.prototype.copy = function (dst, dstStart, srcStart, srcEnd) {
return dst
}
-BufferList.prototype.toString = function (encoding, start, end) {
+BufferList.prototype.toString = function toString (encoding, start, end) {
return this.slice(start, end).toString(encoding)
}
-BufferList.prototype.consume = function (bytes) {
+BufferList.prototype.consume = function consume (bytes) {
while (this._bufs.length) {
- if (bytes > this._bufs[0].length) {
+ if (bytes >= this._bufs[0].length) {
bytes -= this._bufs[0].length
this.length -= this._bufs[0].length
this._bufs.shift()
@@ -175,7 +193,8 @@ BufferList.prototype.consume = function (bytes) {
return this
}
-BufferList.prototype.duplicate = function () {
+
+BufferList.prototype.duplicate = function duplicate () {
var i = 0
, copy = new BufferList()
@@ -185,12 +204,14 @@ BufferList.prototype.duplicate = function () {
return copy
}
-BufferList.prototype.destroy = function () {
- this._bufs.length = 0;
- this.length = 0;
- this.push(null);
+
+BufferList.prototype.destroy = function destroy () {
+ this._bufs.length = 0
+ this.length = 0
+ this.push(null)
}
+
;(function () {
var methods = {
'readDoubleBE' : 8
@@ -218,4 +239,5 @@ BufferList.prototype.destroy = function () {
}
}())
+
module.exports = BufferList
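
One behavioural detail in the bl.js change above is the `consume()` comparison moving from `>` to `>=`: consuming exactly one internal buffer's length now drops that buffer from the list instead of leaving a zero-length slice behind. A hedged usage sketch:

```js
// Sketch of consume() after the boundary fix above (assumes bl at the patched version).
var BufferList = require('bl')

var bl = new BufferList([ new Buffer('abcd'), new Buffer('efgh') ])

bl.consume(4)               // exactly the first buffer: it is removed outright
console.log(bl.length)      // 4
console.log(bl.toString())  // 'efgh'

bl.consume(2)               // a partial consume still slices the head buffer in place
console.log(bl.toString())  // 'gh'
```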
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml
index cfe1c9439388b6..1b82118460cfe4 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/.travis.yml
@@ -6,6 +6,10 @@ before_install:
notifications:
email: false
matrix:
+ fast_finish: true
+ allow_failures:
+ - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
include:
- node_js: '0.8'
env: TASK=test
@@ -25,24 +29,22 @@ matrix:
env: TASK=test
- node_js: 5
env: TASK=test
- - node_js: node
- env: TASK=test
- - node_js: node
- env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
- - node_js: node
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+ - node_js: 5
env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
- - node_js: node
- env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="41..beta"
- - node_js: node
- env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="36..latest"
- - node_js: node
- env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="['6.1', '7.1', '8.2']"
- - node_js: node
- env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="['6.1', '7.1', '8.2']"
- - node_js: node
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
+ - node_js: 5
env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
- - node_js: node
- env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
script: "npm run $TASK"
env:
global:
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
index 77fafa3da15f9f..1a67c48cd031b5 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/README.md
@@ -1,6 +1,6 @@
# readable-stream
-***Node-core streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
+***Node-core v5.8.0 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
[](https://nodei.co/npm/readable-stream/)
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown
index 3988c0cbd0d9c9..0bc3819e63b025 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/doc/stream.markdown
@@ -3,27 +3,25 @@
Stability: 2 - Stable
A stream is an abstract interface implemented by various objects in
-Node.js. For example a [request to an HTTP server][] is a stream, as is
-[stdout][]. Streams are readable, writable, or both. All streams are
-instances of [EventEmitter][]
+Node.js. For example a [request to an HTTP server][http-incoming-message] is a
+stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All
+streams are instances of [`EventEmitter`][].
You can load the Stream base classes by doing `require('stream')`.
There are base classes provided for [Readable][] streams, [Writable][]
streams, [Duplex][] streams, and [Transform][] streams.
-This document is split up into 3 sections. The first explains the
-parts of the API that you need to be aware of to use streams in your
-programs. If you never implement a streaming API yourself, you can
-stop there.
+This document is split up into 3 sections:
-The second section explains the parts of the API that you need to use
-if you implement your own custom streams yourself. The API is
-designed to make this easy for you to do.
-
-The third section goes into more depth about how streams work,
-including some of the internal mechanisms and functions that you
-should probably not modify unless you definitely know what you are
-doing.
+1. The first section explains the parts of the API that you need to be
+ aware of to use streams in your programs.
+2. The second section explains the parts of the API that you need to
+ use if you implement your own custom streams yourself. The API is designed to
+ make this easy for you to do.
+3. The third section goes into more depth about how streams work,
+ including some of the internal mechanisms and functions that you
+ should probably not modify unless you definitely know what you are
+ doing.
## API for Stream Consumers
@@ -37,22 +35,22 @@ and properties depending on whether they are Readable, Writable, or
Duplex.
If a stream is both Readable and Writable, then it implements all of
-the methods and events below. So, a [Duplex][] or [Transform][] stream is
+the methods and events. So, a [Duplex][] or [Transform][] stream is
fully described by this API, though their implementation may be
somewhat different.
It is not necessary to implement Stream interfaces in order to consume
-streams in your programs. If you **are** implementing streaming
+streams in your programs. If you **are** implementing streaming
interfaces in your own program, please also refer to
-[API for Stream Implementors][] below.
+[API for Stream Implementors][].
Almost all Node.js programs, no matter how simple, use Streams in some
way. Here is an example of using Streams in an Node.js program:
-```javascript
-var http = require('http');
+```js
+const http = require('http');
-var server = http.createServer(function (req, res) {
+var server = http.createServer( (req, res) => {
// req is an http.IncomingMessage, which is a Readable Stream
// res is an http.ServerResponse, which is a Writable Stream
@@ -62,18 +60,18 @@ var server = http.createServer(function (req, res) {
req.setEncoding('utf8');
// Readable streams emit 'data' events once a listener is added
- req.on('data', function (chunk) {
+ req.on('data', (chunk) => {
body += chunk;
});
// the end event tells you that you have entire body
- req.on('end', function () {
+ req.on('end', () => {
try {
var data = JSON.parse(body);
} catch (er) {
// uh oh! bad json!
res.statusCode = 400;
- return res.end('error: ' + er.message);
+ return res.end(`error: ${er.message}`);
}
// write back something interesting to the user:
@@ -95,65 +93,66 @@ server.listen(1337);
### Class: stream.Duplex
Duplex streams are streams that implement both the [Readable][] and
-[Writable][] interfaces. See above for usage.
+[Writable][] interfaces.
Examples of Duplex streams include:
-* [tcp sockets][]
-* [zlib streams][]
-* [crypto streams][]
+* [TCP sockets][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
### Class: stream.Readable
The Readable stream interface is the abstraction for a *source* of
-data that you are reading from. In other words, data comes *out* of a
+data that you are reading from. In other words, data comes *out* of a
Readable stream.
A Readable stream will not start emitting data until you indicate that
you are ready to receive it.
Readable streams have two "modes": a **flowing mode** and a **paused
-mode**. When in flowing mode, data is read from the underlying system
-and provided to your program as fast as possible. In paused mode, you
-must explicitly call `stream.read()` to get chunks of data out.
+mode**. When in flowing mode, data is read from the underlying system
+and provided to your program as fast as possible. In paused mode, you
+must explicitly call [`stream.read()`][stream-read] to get chunks of data out.
Streams start out in paused mode.
**Note**: If no data event handlers are attached, and there are no
-[`pipe()`][] destinations, and the stream is switched into flowing
+[`stream.pipe()`][] destinations, and the stream is switched into flowing
mode, then data will be lost.
You can switch to flowing mode by doing any of the following:
-* Adding a [`'data'` event][] handler to listen for data.
-* Calling the [`resume()`][] method to explicitly open the flow.
-* Calling the [`pipe()`][] method to send the data to a [Writable][].
+* Adding a [`'data'`][] event handler to listen for data.
+* Calling the [`stream.resume()`][stream-resume] method to explicitly open the
+ flow.
+* Calling the [`stream.pipe()`][] method to send the data to a [Writable][].
You can switch back to paused mode by doing either of the following:
-* If there are no pipe destinations, by calling the [`pause()`][]
- method.
-* If there are pipe destinations, by removing any [`'data'` event][]
+* If there are no pipe destinations, by calling the
+ [`stream.pause()`][stream-pause] method.
+* If there are pipe destinations, by removing any [`'data'`][] event
handlers, and removing all pipe destinations by calling the
- [`unpipe()`][] method.
+ [`stream.unpipe()`][] method.
-Note that, for backwards compatibility reasons, removing `'data'`
-event handlers will **not** automatically pause the stream. Also, if
-there are piped destinations, then calling `pause()` will not
-guarantee that the stream will *remain* paused once those
+Note that, for backwards compatibility reasons, removing [`'data'`][]
+event handlers will **not** automatically pause the stream. Also, if
+there are piped destinations, then calling [`stream.pause()`][stream-pause] will
+not guarantee that the stream will *remain* paused once those
destinations drain and ask for more data.
Examples of readable streams include:
-* [http responses, on the client][]
-* [http requests, on the server][]
+* [HTTP responses, on the client][http-incoming-message]
+* [HTTP requests, on the server][http-incoming-message]
* [fs read streams][]
-* [zlib streams][]
-* [crypto streams][]
-* [tcp sockets][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+* [TCP sockets][]
* [child process stdout and stderr][]
-* [process.stdin][]
+* [`process.stdin`][]
#### Event: 'close'
@@ -161,22 +160,22 @@ Emitted when the stream and any of its underlying resources (a file
descriptor, for example) have been closed. The event indicates that
no more events will be emitted, and no further computation will occur.
-Not all streams will emit the 'close' event.
+Not all streams will emit the `'close'` event.
#### Event: 'data'
-* `chunk` {Buffer | String} The chunk of data.
+* `chunk` {Buffer|String} The chunk of data.
-Attaching a `data` event listener to a stream that has not been
+Attaching a `'data'` event listener to a stream that has not been
explicitly paused will switch the stream into flowing mode. Data will
then be passed as soon as it is available.
If you just want to get all the data out of the stream as fast as
possible, this is the best way to do so.
-```javascript
+```js
var readable = getReadableStreamSomehow();
-readable.on('data', function(chunk) {
+readable.on('data', (chunk) => {
console.log('got %d bytes of data', chunk.length);
});
```
@@ -185,16 +184,17 @@ readable.on('data', function(chunk) {
This event fires when there will be no more data to read.
-Note that the `end` event **will not fire** unless the data is
-completely consumed. This can be done by switching into flowing mode,
-or by calling `read()` repeatedly until you get to the end.
+Note that the `'end'` event **will not fire** unless the data is
+completely consumed. This can be done by switching into flowing mode,
+or by calling [`stream.read()`][stream-read] repeatedly until you get to the
+end.
-```javascript
+```js
var readable = getReadableStreamSomehow();
-readable.on('data', function(chunk) {
+readable.on('data', (chunk) => {
console.log('got %d bytes of data', chunk.length);
});
-readable.on('end', function() {
+readable.on('end', () => {
console.log('there will be no more data.');
});
```
@@ -216,30 +216,30 @@ hadn't already.
```javascript
var readable = getReadableStreamSomehow();
-readable.on('readable', function() {
+readable.on('readable', () => {
// there is some data to read now
});
```
-Once the internal buffer is drained, a `readable` event will fire
+Once the internal buffer is drained, a `'readable'` event will fire
again when more data is available.
-The `readable` event is not emitted in the "flowing" mode with the
+The `'readable'` event is not emitted in the "flowing" mode with the
sole exception of the last one, on end-of-stream.
-The 'readable' event indicates that the stream has new information:
+The `'readable'` event indicates that the stream has new information:
either new data is available or the end of the stream has been reached.
-In the former case, `.read()` will return that data. In the latter case,
-`.read()` will return null. For instance, in the following example, `foo.txt`
-is an empty file:
+In the former case, [`stream.read()`][stream-read] will return that data. In the
+latter case, [`stream.read()`][stream-read] will return null. For instance, in
+the following example, `foo.txt` is an empty file:
-```javascript
-var fs = require('fs');
+```js
+const fs = require('fs');
var rr = fs.createReadStream('foo.txt');
-rr.on('readable', function() {
+rr.on('readable', () => {
console.log('readable:', rr.read());
});
-rr.on('end', function() {
+rr.on('end', () => {
console.log('end');
});
```
@@ -247,20 +247,20 @@ rr.on('end', function() {
The output of running this script is:
```
-bash-3.2$ node test.js
+$ node test.js
readable: null
end
```
#### readable.isPaused()
-* Return: `Boolean`
+* Return: {Boolean}
This method returns whether or not the `readable` has been **explicitly**
-paused by client code (using `readable.pause()` without a corresponding
-`readable.resume()`).
+paused by client code (using [`stream.pause()`][stream-pause] without a
+corresponding [`stream.resume()`][stream-resume]).
-```javascript
+```js
var readable = new stream.Readable
readable.isPaused() // === false
@@ -275,16 +275,16 @@ readable.isPaused() // === false
* Return: `this`
This method will cause a stream in flowing mode to stop emitting
-`data` events, switching out of flowing mode. Any data that becomes
+[`'data'`][] events, switching out of flowing mode. Any data that becomes
available will remain in the internal buffer.
-```javascript
+```js
var readable = getReadableStreamSomehow();
-readable.on('data', function(chunk) {
+readable.on('data', (chunk) => {
console.log('got %d bytes of data', chunk.length);
readable.pause();
console.log('there will be no more data for 1 second');
- setTimeout(function() {
+ setTimeout(() => {
console.log('now data will start flowing again');
readable.resume();
}, 1000);
@@ -293,7 +293,7 @@ readable.on('data', function(chunk) {
#### readable.pipe(destination[, options])
-* `destination` {[Writable][] Stream} The destination for writing data
+* `destination` {stream.Writable} The destination for writing data
* `options` {Object} Pipe options
* `end` {Boolean} End the writer when the reader ends. Default = `true`
@@ -303,7 +303,7 @@ the destination is not overwhelmed by a fast readable stream.
Multiple destinations can be piped to safely.
-```javascript
+```js
var readable = getReadableStreamSomehow();
var writable = fs.createWriteStream('file.txt');
// All the data from readable goes into 'file.txt'
@@ -313,7 +313,7 @@ readable.pipe(writable);
This function returns the destination stream, so you can set up pipe
chains like so:
-```javascript
+```js
var r = fs.createReadStream('file.txt');
var z = zlib.createGzip();
var w = fs.createWriteStream('file.txt.gz');
@@ -322,51 +322,51 @@ r.pipe(z).pipe(w);
For example, emulating the Unix `cat` command:
-```javascript
+```js
process.stdin.pipe(process.stdout);
```
-By default [`end()`][] is called on the destination when the source stream
-emits `end`, so that `destination` is no longer writable. Pass `{ end:
-false }` as `options` to keep the destination stream open.
+By default [`stream.end()`][stream-end] is called on the destination when the
+source stream emits [`'end'`][], so that `destination` is no longer writable.
+Pass `{ end: false }` as `options` to keep the destination stream open.
This keeps `writer` open so that "Goodbye" can be written at the
end.
-```javascript
+```js
reader.pipe(writer, { end: false });
-reader.on('end', function() {
+reader.on('end', () => {
writer.end('Goodbye\n');
});
```
-Note that `process.stderr` and `process.stdout` are never closed until
+Note that [`process.stderr`][] and [`process.stdout`][] are never closed until
the process exits, regardless of the specified options.
#### readable.read([size])
* `size` {Number} Optional argument to specify how much data to read.
-* Return {String | Buffer | null}
+* Return {String|Buffer|Null}
The `read()` method pulls some data out of the internal buffer and
-returns it. If there is no data available, then it will return
+returns it. If there is no data available, then it will return
`null`.
If you pass in a `size` argument, then it will return that many
-bytes. If `size` bytes are not available, then it will return `null`,
+bytes. If `size` bytes are not available, then it will return `null`,
unless we've ended, in which case it will return the data remaining
in the buffer.
If you do not specify a `size` argument, then it will return all the
data in the internal buffer.
-This method should only be called in paused mode. In flowing mode,
+This method should only be called in paused mode. In flowing mode,
this method is called automatically until the internal buffer is
drained.
-```javascript
+```js
var readable = getReadableStreamSomehow();
-readable.on('readable', function() {
+readable.on('readable', () => {
var chunk;
while (null !== (chunk = readable.read())) {
console.log('got %d bytes of data', chunk.length);
@@ -375,27 +375,27 @@ readable.on('readable', function() {
```
If this method returns a data chunk, then it will also trigger the
-emission of a [`'data'` event][].
+emission of a [`'data'`][] event.
-Note that calling `readable.read([size])` after the `end` event has been
-triggered will return `null`. No runtime error will be raised.
+Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][]
+event has been triggered will return `null`. No runtime error will be raised.
#### readable.resume()
* Return: `this`
-This method will cause the readable stream to resume emitting `data`
+This method will cause the readable stream to resume emitting [`'data'`][]
events.
-This method will switch the stream into flowing mode. If you do *not*
+This method will switch the stream into flowing mode. If you do *not*
want to consume the data from a stream, but you *do* want to get to
-its `end` event, you can call [`readable.resume()`][] to open the flow of
-data.
+its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open
+the flow of data.
-```javascript
+```js
var readable = getReadableStreamSomehow();
readable.resume();
-readable.on('end', function() {
+readable.on('end', () => {
console.log('got to the end, but did not read anything');
});
```
@@ -405,22 +405,25 @@ readable.on('end', function() {
* `encoding` {String} The encoding to use.
* Return: `this`
-Call this function to cause the stream to return strings of the
-specified encoding instead of Buffer objects. For example, if you do
-`readable.setEncoding('utf8')`, then the output data will be
-interpreted as UTF-8 data, and returned as strings. If you do
-`readable.setEncoding('hex')`, then the data will be encoded in
-hexadecimal string format.
+Call this function to cause the stream to return strings of the specified
+encoding instead of Buffer objects. For example, if you do
+`readable.setEncoding('utf8')`, then the output data will be interpreted as
+UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`,
+then the data will be encoded in hexadecimal string format.
This properly handles multi-byte characters that would otherwise be
potentially mangled if you simply pulled the Buffers directly and
-called `buf.toString(encoding)` on them. If you want to read the data
+called [`buf.toString(encoding)`][] on them. If you want to read the data
as strings, always use this method.
-```javascript
+Also you can disable any encoding at all with `readable.setEncoding(null)`.
+This approach is very useful if you deal with binary data or with large
+multi-byte strings spread out over multiple chunks.
+
+```js
var readable = getReadableStreamSomehow();
readable.setEncoding('utf8');
-readable.on('data', function(chunk) {
+readable.on('data', (chunk) => {
assert.equal(typeof chunk, 'string');
console.log('got %d characters of string data', chunk.length);
});
@@ -428,22 +431,23 @@ readable.on('data', function(chunk) {
#### readable.unpipe([destination])
-* `destination` {[Writable][] Stream} Optional specific stream to unpipe
+* `destination` {stream.Writable} Optional specific stream to unpipe
-This method will remove the hooks set up for a previous `pipe()` call.
+This method will remove the hooks set up for a previous [`stream.pipe()`][]
+call.
If the destination is not specified, then all pipes are removed.
If the destination is specified, but no pipe is set up for it, then
this is a no-op.
-```javascript
+```js
var readable = getReadableStreamSomehow();
var writable = fs.createWriteStream('file.txt');
// All the data from readable goes into 'file.txt',
// but only for the first second
readable.pipe(writable);
-setTimeout(function() {
+setTimeout(() => {
console.log('stop writing to file.txt');
readable.unpipe(writable);
console.log('manually close the file stream');
@@ -453,25 +457,25 @@ setTimeout(function() {
#### readable.unshift(chunk)
-* `chunk` {Buffer | String} Chunk of data to unshift onto the read queue
+* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue
This is useful in certain cases where a stream is being consumed by a
parser, which needs to "un-consume" some data that it has
optimistically pulled out of the source, so that the stream can be
passed on to some other party.
-Note that `stream.unshift(chunk)` cannot be called after the `end` event
+Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event
has been triggered; a runtime error will be raised.
If you find that you must often call `stream.unshift(chunk)` in your
-programs, consider implementing a [Transform][] stream instead. (See API
-for Stream Implementors, below.)
+programs, consider implementing a [Transform][] stream instead. (See [API
+for Stream Implementors][].)
-```javascript
+```js
// Pull off a header delimited by \n\n
// use unshift() if we get too much
// Call the callback with (error, header, stream)
-var StringDecoder = require('string_decoder').StringDecoder;
+const StringDecoder = require('string_decoder').StringDecoder;
function parseHeader(stream, callback) {
stream.on('error', callback);
stream.on('readable', onReadable);
@@ -501,39 +505,41 @@ function parseHeader(stream, callback) {
}
}
```
-Note that, unlike `stream.push(chunk)`, `stream.unshift(chunk)` will not
-end the reading process by resetting the internal reading state of the
-stream. This can cause unexpected results if `unshift` is called during a
-read (i.e. from within a `_read` implementation on a custom stream). Following
-the call to `unshift` with an immediate `stream.push('')` will reset the
-reading state appropriately, however it is best to simply avoid calling
-`unshift` while in the process of performing a read.
+
+Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)`
+will not end the reading process by resetting the internal reading state of the
+stream. This can cause unexpected results if `unshift()` is called during a
+read (i.e. from within a [`stream._read()`][stream-_read] implementation on a
+custom stream). Following the call to `unshift()` with an immediate
+[`stream.push('')`][stream-push] will reset the reading state appropriately,
+however it is best to simply avoid calling `unshift()` while in the process of
+performing a read.
#### readable.wrap(stream)
* `stream` {Stream} An "old style" readable stream
Versions of Node.js prior to v0.10 had streams that did not implement the
-entire Streams API as it is today. (See "Compatibility" below for
+entire Streams API as it is today. (See [Compatibility][] for
more information.)
-If you are using an older Node.js library that emits `'data'` events and
-has a [`pause()`][] method that is advisory only, then you can use the
-`wrap()` method to create a [Readable][] stream that uses the old stream
-as its data source.
+If you are using an older Node.js library that emits [`'data'`][] events and
+has a [`stream.pause()`][stream-pause] method that is advisory only, then you
+can use the `wrap()` method to create a [Readable][] stream that uses the old
+stream as its data source.
You will very rarely ever need to call this function, but it exists
as a convenience for interacting with old Node.js programs and libraries.
For example:
-```javascript
-var OldReader = require('./old-api-module.js').OldReader;
-var oreader = new OldReader;
-var Readable = require('stream').Readable;
-var myReader = new Readable().wrap(oreader);
+```js
+const OldReader = require('./old-api-module.js').OldReader;
+const Readable = require('stream').Readable;
+const oreader = new OldReader;
+const myReader = new Readable().wrap(oreader);
-myReader.on('readable', function() {
+myReader.on('readable', () => {
myReader.read(); // etc.
});
```
@@ -541,13 +547,13 @@ myReader.on('readable', function() {
### Class: stream.Transform
Transform streams are [Duplex][] streams where the output is in some way
-computed from the input. They implement both the [Readable][] and
-[Writable][] interfaces. See above for usage.
+computed from the input. They implement both the [Readable][] and
+[Writable][] interfaces.
Examples of Transform streams include:
-* [zlib streams][]
-* [crypto streams][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
### Class: stream.Writable
@@ -558,22 +564,22 @@ that you are writing data *to*.
Examples of writable streams include:
-* [http requests, on the client][]
-* [http responses, on the server][]
+* [HTTP requests, on the client][]
+* [HTTP responses, on the server][]
* [fs write streams][]
-* [zlib streams][]
-* [crypto streams][]
-* [tcp sockets][]
+* [zlib streams][zlib]
+* [crypto streams][crypto]
+* [TCP sockets][]
* [child process stdin][]
-* [process.stdout][], [process.stderr][]
+* [`process.stdout`][], [`process.stderr`][]
#### Event: 'drain'
-If a [`writable.write(chunk)`][] call returns false, then the `drain`
-event will indicate when it is appropriate to begin writing more data
+If a [`stream.write(chunk)`][stream-write] call returns `false`, then the
+`'drain'` event will indicate when it is appropriate to begin writing more data
to the stream.
-```javascript
+```js
// Write the data to the supplied writable stream one million times.
// Be attentive to back-pressure.
function writeOneMillionTimes(writer, data, encoding, callback) {
@@ -603,37 +609,37 @@ function writeOneMillionTimes(writer, data, encoding, callback) {
#### Event: 'error'
-* {Error object}
+* {Error}
Emitted if there was an error when writing or piping data.
#### Event: 'finish'
-When the [`end()`][] method has been called, and all data has been flushed
-to the underlying system, this event is emitted.
+When the [`stream.end()`][stream-end] method has been called, and all data has
+been flushed to the underlying system, this event is emitted.
```javascript
var writer = getWritableStreamSomehow();
for (var i = 0; i < 100; i ++) {
- writer.write('hello, #' + i + '!\n');
+ writer.write('hello, #${i}!\n');
}
writer.end('this is the end\n');
-writer.on('finish', function() {
+writer.on('finish', () => {
console.error('all writes are now complete.');
});
```
#### Event: 'pipe'
-* `src` {[Readable][] Stream} source stream that is piping to this writable
+* `src` {stream.Readable} source stream that is piping to this writable
-This is emitted whenever the `pipe()` method is called on a readable
+This is emitted whenever the [`stream.pipe()`][] method is called on a readable
stream, adding this writable to its set of destinations.
-```javascript
+```js
var writer = getWritableStreamSomehow();
var reader = getReadableStreamSomehow();
-writer.on('pipe', function(src) {
+writer.on('pipe', (src) => {
console.error('something is piping into the writer');
assert.equal(src, reader);
});
@@ -642,15 +648,16 @@ reader.pipe(writer);
#### Event: 'unpipe'
-* `src` {[Readable][] Stream} The source stream that [unpiped][] this writable
+* `src` {[Readable][] Stream} The source stream that
+ [unpiped][`stream.unpipe()`] this writable
-This is emitted whenever the [`unpipe()`][] method is called on a
+This is emitted whenever the [`stream.unpipe()`][] method is called on a
readable stream, removing this writable from its set of destinations.
-```javascript
+```js
var writer = getWritableStreamSomehow();
var reader = getReadableStreamSomehow();
-writer.on('unpipe', function(src) {
+writer.on('unpipe', (src) => {
console.error('something has stopped piping into the writer');
assert.equal(src, reader);
});
@@ -662,20 +669,22 @@ reader.unpipe(writer);
Forces buffering of all writes.
-Buffered data will be flushed either at `.uncork()` or at `.end()` call.
+Buffered data will be flushed either at [`stream.uncork()`][] or at
+[`stream.end()`][stream-end] call.
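
A minimal sketch of batching several small writes with `cork()`/`uncork()`,
assuming the same placeholder `getWritableStreamSomehow()` helper used in the
other examples:

```js
// Buffer several small writes and flush them together.
var writer = getWritableStreamSomehow();

writer.cork();
writer.write('header\n');
writer.write('body\n');
writer.write('footer\n');

// Everything buffered since the cork() call is flushed here.
process.nextTick(() => {
  writer.uncork();
});
```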
#### writable.end([chunk][, encoding][, callback])
-* `chunk` {String | Buffer} Optional data to write
+* `chunk` {String|Buffer} Optional data to write
* `encoding` {String} The encoding, if `chunk` is a String
* `callback` {Function} Optional callback for when the stream is finished
-Call this method when no more data will be written to the stream. If
-supplied, the callback is attached as a listener on the `finish` event.
+Call this method when no more data will be written to the stream. If supplied,
+the callback is attached as a listener on the [`'finish'`][] event.
-Calling [`write()`][] after calling [`end()`][] will raise an error.
+Calling [`stream.write()`][stream-write] after calling
+[`stream.end()`][stream-end] will raise an error.
-```javascript
+```js
// write 'hello, ' and then end with 'world!'
var file = fs.createWriteStream('example.txt');
file.write('hello, ');
@@ -691,26 +700,26 @@ Sets the default encoding for a writable stream.
#### writable.uncork()
-Flush all data, buffered since `.cork()` call.
+Flush all data, buffered since [`stream.cork()`][] call.
#### writable.write(chunk[, encoding][, callback])
-* `chunk` {String | Buffer} The data to write
+* `chunk` {String|Buffer} The data to write
* `encoding` {String} The encoding, if `chunk` is a String
* `callback` {Function} Callback for when this chunk of data is flushed
-* Returns: {Boolean} True if the data was handled completely.
+* Returns: {Boolean} `true` if the data was handled completely.
This method writes some data to the underlying system, and calls the
supplied callback once the data has been fully handled.
The return value indicates if you should continue writing right now.
If the data had to be buffered internally, then it will return
-`false`. Otherwise, it will return `true`.
+`false`. Otherwise, it will return `true`.
-This return value is strictly advisory. You MAY continue to write,
-even if it returns `false`. However, writes will be buffered in
-memory, so it is best not to do this excessively. Instead, wait for
-the `drain` event before writing more data.
+This return value is strictly advisory. You MAY continue to write,
+even if it returns `false`. However, writes will be buffered in
+memory, so it is best not to do this excessively. Instead, wait for
+the [`'drain'`][] event before writing more data.
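
A minimal sketch of honoring this advisory return value, again assuming the
placeholder `getWritableStreamSomehow()` helper:

```js
var writer = getWritableStreamSomehow();
var i = 0;

function writeSome() {
  var ok = true;
  while (i < 1000 && ok) {
    // Stop writing as soon as write() asks us to back off.
    ok = writer.write('chunk ' + i + '\n');
    i++;
  }
  if (i < 1000) {
    // Resume only once the internal buffer has drained.
    writer.once('drain', writeSome);
  } else {
    writer.end();
  }
}

writeSome();
```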
## API for Stream Implementors
@@ -719,11 +728,11 @@ the `drain` event before writing more data.
To implement any sort of stream, the pattern is the same:
-1. Extend the appropriate parent class in your own subclass. (The
- [`util.inherits`][] method is particularly helpful for this.)
+1. Extend the appropriate parent class in your own subclass. (The
+ [`util.inherits()`][] method is particularly helpful for this.)
2. Call the appropriate parent class constructor in your constructor,
to be sure that the internal mechanisms are set up properly.
-2. Implement one or more specific methods, as detailed below.
+3. Implement one or more specific methods, as detailed below.
The class to extend and the method(s) to implement depend on the sort
of stream class you are writing:
@@ -750,7 +759,7 @@ of stream class you are writing:
[Readable](#stream_class_stream_readable_1)
- [_read][]
+ [_read][stream-_read]
@@ -761,7 +770,7 @@ of stream class you are writing:
[Writable](#stream_class_stream_writable_1)
- [_write][]
, _writev
+ [_write][stream-_write]
, [_writev][stream-_writev]
@@ -772,7 +781,7 @@ of stream class you are writing:
[Duplex](#stream_class_stream_duplex_1)
- [_read][]
, [_write][]
, _writev
+ [_read][stream-_read]
, [_write][stream-_write]
, [_writev][stream-_writev]
@@ -783,45 +792,45 @@ of stream class you are writing:
[Transform](#stream_class_stream_transform_1)
- _transform
, _flush
+ [_transform][stream-_transform]
, [_flush][stream-_flush]
-In your implementation code, it is very important to never call the
-methods described in [API for Stream Consumers][] above. Otherwise, you
-can potentially cause adverse side effects in programs that consume
-your streaming interfaces.
+In your implementation code, it is very important to never call the methods
+described in [API for Stream Consumers][]. Otherwise, you can potentially cause
+adverse side effects in programs that consume your streaming interfaces.
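
A minimal sketch of that three-step pattern, using a hypothetical `ByteCounter`
Writable chosen purely for illustration:

```js
const Writable = require('stream').Writable;
const util = require('util');

// 1. Extend the appropriate parent class.
util.inherits(ByteCounter, Writable);

function ByteCounter(options) {
  // 2. Call the parent class constructor.
  Writable.call(this, options);
  this.bytes = 0;
}

// 3. Implement the specific method(s) -- _write() for a Writable.
ByteCounter.prototype._write = function(chunk, encoding, callback) {
  this.bytes += chunk.length;
  callback();
};

var counter = new ByteCounter();
counter.end('hello', () => {
  console.log('%d bytes written', counter.bytes);
});
```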
### Class: stream.Duplex
-A "duplex" stream is one that is both Readable and Writable, such as a
-TCP socket connection.
+A "duplex" stream is one that is both Readable and Writable, such as a TCP
+socket connection.
Note that `stream.Duplex` is an abstract class designed to be extended
-with an underlying implementation of the `_read(size)` and
-[`_write(chunk, encoding, callback)`][] methods as you would with a
-Readable or Writable stream class.
+with an underlying implementation of the [`stream._read(size)`][stream-_read]
+and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you
+would with a Readable or Writable stream class.
-Since JavaScript doesn't have multiple prototypal inheritance, this
-class prototypally inherits from Readable, and then parasitically from
-Writable. It is thus up to the user to implement both the lowlevel
-`_read(n)` method as well as the lowlevel
-[`_write(chunk, encoding, callback)`][] method on extension duplex classes.
+Since JavaScript doesn't have multiple prototypal inheritance, this class
+prototypally inherits from Readable, and then parasitically from Writable. It is
+thus up to the user to implement both the low-level
+[`stream._read(n)`][stream-_read] method as well as the low-level
+[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension
+duplex classes.
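
A minimal sketch of such a class, with the hypothetical name `EchoGreeting`
chosen only for illustration:

```js
const Duplex = require('stream').Duplex;
const util = require('util');

util.inherits(EchoGreeting, Duplex);

function EchoGreeting(options) {
  Duplex.call(this, options);
  this._greeted = false;
}

// Low-level readable side: emit one greeting, then EOF.
EchoGreeting.prototype._read = function(size) {
  if (this._greeted) {
    this.push(null);
  } else {
    this._greeted = true;
    this.push('hello\n');
  }
};

// Low-level writable side: a real implementation would do something
// useful with the incoming chunk.
EchoGreeting.prototype._write = function(chunk, encoding, callback) {
  callback();
};
```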
#### new stream.Duplex(options)
* `options` {Object} Passed to both Writable and Readable
constructors. Also has the following fields:
- * `allowHalfOpen` {Boolean} Default=true. If set to `false`, then
+ * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then
the stream will automatically end the readable side when the
writable side ends and vice versa.
- * `readableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
for readable side of the stream. Has no effect if `objectMode`
is `true`.
- * `writableObjectMode` {Boolean} Default=false. Sets `objectMode`
+ * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
for writable side of the stream. Has no effect if `objectMode`
is `true`.
@@ -832,7 +841,7 @@ initialized.
### Class: stream.PassThrough
This is a trivial implementation of a [Transform][] stream that simply
-passes the input bytes across to the output. Its purpose is mainly
+passes the input bytes across to the output. Its purpose is mainly
for examples and testing, but there are occasionally use cases where
it can come in handy as a building block for novel sorts of streams.
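
A minimal sketch of using a PassThrough as such a building block, here tapping
a pipeline to log how much data flows through it (the stdin/stdout pipeline is
only a stand-in):

```js
const PassThrough = require('stream').PassThrough;

const tap = new PassThrough();
tap.on('data', (chunk) => {
  console.error('saw %d bytes', chunk.length);
});

process.stdin.pipe(tap).pipe(process.stdout);
```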
@@ -841,10 +850,10 @@ it can come in handy as a building block for novel sorts of streams.
`stream.Readable` is an abstract class designed to be extended with an
-underlying implementation of the [`_read(size)`][] method.
+underlying implementation of the [`stream._read(size)`][stream-_read] method.
-Please see above under [API for Stream Consumers][] for how to consume
-streams in your programs. What follows is an explanation of how to
+Please see [API for Stream Consumers][] for how to consume
+streams in your programs. What follows is an explanation of how to
implement Readable streams in your programs.
#### new stream.Readable([options])
@@ -852,12 +861,14 @@ implement Readable streams in your programs.
* `options` {Object}
* `highWaterMark` {Number} The maximum number of bytes to store in
the internal buffer before ceasing to read from the underlying
- resource. Default=16kb, or 16 for `objectMode` streams
+ resource. Default = `16384` (16KB), or `16` for `objectMode` streams
* `encoding` {String} If specified, then buffers will be decoded to
- strings using the specified encoding. Default=null
+ strings using the specified encoding. Default = `null`
* `objectMode` {Boolean} Whether this stream should behave
- as a stream of objects. Meaning that stream.read(n) returns
- a single value instead of a Buffer of size n. Default=false
+ as a stream of objects, meaning that [`stream.read(n)`][stream-read] returns
+ a single value instead of a Buffer of size `n`. Default = `false`
+ * `read` {Function} Implementation for the [`stream._read()`][stream-_read]
+ method.
In classes that extend the Readable class, make sure to call the
Readable constructor so that the buffering settings can be properly
@@ -871,29 +882,31 @@ Note: **Implement this method, but do NOT call it directly.**
This method is prefixed with an underscore because it is internal to the
class that defines it and should only be called by the internal Readable
-class methods. All Readable stream implementations must provide a _read
+class methods. All Readable stream implementations must provide a \_read
method to fetch data from the underlying resource.
-When _read is called, if data is available from the resource, `_read` should
-start pushing that data into the read queue by calling `this.push(dataChunk)`.
-`_read` should continue reading from the resource and pushing data until push
-returns false, at which point it should stop reading from the resource. Only
-when _read is called again after it has stopped should it start reading
-more data from the resource and pushing that data onto the queue.
+When `_read()` is called, if data is available from the resource, the `_read()`
+implementation should start pushing that data into the read queue by calling
+[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from
+the resource and pushing data until push returns `false`, at which point it
+should stop reading from the resource. Only when `_read()` is called again after
+it has stopped should it start reading more data from the resource and pushing
+that data onto the queue.
Note: once the `_read()` method is called, it will not be called again until
-the `push` method is called.
+the [`stream.push()`][stream-push] method is called.
-The `size` argument is advisory. Implementations where a "read" is a
+The `size` argument is advisory. Implementations where a "read" is a
single call that returns data can use this to know how much data to
-fetch. Implementations where that is not relevant, such as TCP or
+fetch. Implementations where that is not relevant, such as TCP or
TLS, may ignore this argument, and simply provide data whenever it
-becomes available. There is no need, for example to "wait" until
-`size` bytes are available before calling [`stream.push(chunk)`][].
+becomes available. There is no need, for example, to "wait" until
+`size` bytes are available before calling [`stream.push(chunk)`][stream-push].
#### readable.push(chunk[, encoding])
-* `chunk` {Buffer | null | String} Chunk of data to push into the read queue
+
+* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
* `encoding` {String} Encoding of String chunks. Must be a valid
Buffer encoding, such as `'utf8'` or `'ascii'`
* return {Boolean} Whether or not more pushes should be performed
@@ -906,15 +919,15 @@ into the queue for subsequent stream processors to consume. If `null` is
passed, it signals the end of the stream (EOF), after which no more data
can be written.
-The data added with `push` can be pulled out by calling the `read()` method
-when the `'readable'`event fires.
+The data added with `push()` can be pulled out by calling the
+[`stream.read()`][stream-read] method when the [`'readable'`][] event fires.
-This API is designed to be as flexible as possible. For example,
+This API is designed to be as flexible as possible. For example,
you may be wrapping a lower-level source which has some sort of
-pause/resume mechanism, and a data callback. In those cases, you
+pause/resume mechanism, and a data callback. In those cases, you
could wrap the low-level source object by doing something like this:
-```javascript
+```js
// source is an object with readStop() and readStart() methods,
// and an `ondata` member that gets called when it has data, and
// an `onend` member that gets called when the data is over.
@@ -925,18 +938,17 @@ function SourceWrapper(options) {
Readable.call(this, options);
this._source = getLowlevelSourceObject();
- var self = this;
// Every time there's data, we push it into the internal buffer.
- this._source.ondata = function(chunk) {
+ this._source.ondata = (chunk) => {
// if push() returns false, then we need to stop reading from source
- if (!self.push(chunk))
- self._source.readStop();
+ if (!this.push(chunk))
+ this._source.readStop();
};
// When the source ends, we push the EOF-signaling `null` chunk
- this._source.onend = function() {
- self.push(null);
+ this._source.onend = () => {
+ this.push(null);
};
}
@@ -951,12 +963,12 @@ SourceWrapper.prototype._read = function(size) {
-This is a basic example of a Readable stream. It emits the numerals
+This is a basic example of a Readable stream. It emits the numerals
from 1 to 1,000,000 in ascending order, and then ends.
-```javascript
-var Readable = require('stream').Readable;
-var util = require('util');
+```js
+const Readable = require('stream').Readable;
+const util = require('util');
util.inherits(Counter, Readable);
function Counter(opt) {
@@ -979,24 +991,25 @@ Counter.prototype._read = function() {
#### Example: SimpleProtocol v1 (Sub-optimal)
-This is similar to the `parseHeader` function described above, but
-implemented as a custom stream. Also, note that this implementation
-does not convert the incoming data to a string.
+This is similar to the `parseHeader` function described
+[here](#stream_readable_unshift_chunk), but implemented as a custom stream.
+Also, note that this implementation does not convert the incoming data to a
+string.
-However, this would be better implemented as a [Transform][] stream. See
-below for a better implementation.
+However, this would be better implemented as a [Transform][] stream. See
+[SimpleProtocol v2][] for a better implementation.
-```javascript
+```js
// A parser for a simple data protocol.
// The "header" is a JSON object, followed by 2 \n characters, and
// then a message body.
//
// NOTE: This can be done more simply as a Transform stream!
-// Using Readable directly for this is sub-optimal. See the
+// Using Readable directly for this is sub-optimal. See the
// alternative example below under the Transform section.
-var Readable = require('stream').Readable;
-var util = require('util');
+const Readable = require('stream').Readable;
+const util = require('util');
util.inherits(SimpleProtocol, Readable);
@@ -1012,13 +1025,13 @@ function SimpleProtocol(source, options) {
this._source = source;
var self = this;
- source.on('end', function() {
+ source.on('end', () => {
self.push(null);
});
// give it a kick whenever the source is readable
// read(0) will not consume any bytes
- source.on('readable', function() {
+ source.on('readable', () => {
self.read(0);
});
@@ -1098,19 +1111,24 @@ connected in some way to the input, such as a [zlib][] stream or a
[crypto][] stream.
There is no requirement that the output be the same size as the input,
-the same number of chunks, or arrive at the same time. For example, a
+the same number of chunks, or arrive at the same time. For example, a
Hash stream will only ever have a single chunk of output which is
-provided when the input is ended. A zlib stream will produce output
+provided when the input is ended. A zlib stream will produce output
that is either much smaller or much larger than its input.
-Rather than implement the [`_read()`][] and [`_write()`][] methods, Transform
-classes must implement the `_transform()` method, and may optionally
-also implement the `_flush()` method. (See below.)
+Rather than implement the [`stream._read()`][stream-_read] and
+[`stream._write()`][stream-_write] methods, Transform classes must implement the
+[`stream._transform()`][stream-_transform] method, and may optionally
+also implement the [`stream._flush()`][stream-_flush] method. (See below.)
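
A minimal sketch of a Transform that implements only `_transform()`, with the
hypothetical name `UpperCaser` chosen for illustration:

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(UpperCaser, Transform);

function UpperCaser(options) {
  Transform.call(this, options);
}

UpperCaser.prototype._transform = function(chunk, encoding, callback) {
  this.push(chunk.toString().toUpperCase());
  callback();
};

// e.g. upper-case everything flowing from stdin to stdout
process.stdin.pipe(new UpperCaser()).pipe(process.stdout);
```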
#### new stream.Transform([options])
* `options` {Object} Passed to both Writable and Readable
- constructors.
+ constructors. Also has the following fields:
+ * `transform` {Function} Implementation for the
+ [`stream._transform()`][stream-_transform] method.
+ * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
+ method.
In classes that extend the Transform class, make sure to call the
constructor so that the buffering settings can be properly
@@ -1118,11 +1136,12 @@ initialized.
#### Events: 'finish' and 'end'
-The [`finish`][] and [`end`][] events are from the parent Writable
-and Readable classes respectively. The `finish` event is fired after
-`.end()` is called and all chunks have been processed by `_transform`,
-`end` is fired after all data has been output which is after the callback
-in `_flush` has been called.
+The [`'finish'`][] and [`'end'`][] events are from the parent Writable
+and Readable classes respectively. The `'finish'` event is fired after
+[`stream.end()`][stream-end] is called and all chunks have been processed by
+[`stream._transform()`][stream-_transform], `'end'` is fired after all data has
+been output which is after the callback in [`stream._flush()`][stream-_flush]
+has been called.
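
A minimal sketch that makes this ordering observable, using a PassThrough
purely as a convenient stand-in Transform:

```js
const PassThrough = require('stream').PassThrough;

const stream = new PassThrough();

stream.on('finish', () => console.log("'finish': all input has been processed"));
stream.on('end', () => console.log("'end': all output has been consumed"));

stream.end('some data');
stream.resume(); // consume the readable side so that 'end' can fire
```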
#### transform.\_flush(callback)
@@ -1134,26 +1153,26 @@ by child classes, and if so, will be called by the internal Transform
class methods only.
In some cases, your transform operation may need to emit a bit more
-data at the end of the stream. For example, a `Zlib` compression
+data at the end of the stream. For example, a `Zlib` compression
stream will store up some internal state so that it can optimally
-compress the output. At the end, however, it needs to do the best it
+compress the output. At the end, however, it needs to do the best it
can with what is left, so that the data will be complete.
-In those cases, you can implement a `_flush` method, which will be
+In those cases, you can implement a `_flush()` method, which will be
called at the very end, after all the written data is consumed, but
-before emitting `end` to signal the end of the readable side. Just
-like with `_transform`, call `transform.push(chunk)` zero or more
-times, as appropriate, and call `callback` when the flush operation is
-complete.
+before emitting [`'end'`][] to signal the end of the readable side. Just
+like with [`stream._transform()`][stream-_transform], call
+`transform.push(chunk)` zero or more times, as appropriate, and call `callback`
+when the flush operation is complete.
This method is prefixed with an underscore because it is internal to
the class that defines it, and should not be called directly by user
-programs. However, you **are** expected to override this method in
+programs. However, you **are** expected to override this method in
your own extension classes.
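
A minimal sketch of a `_flush()` implementation, using a hypothetical
line-splitting Transform that emits any partially buffered line when the
writable side ends:

```js
const Transform = require('stream').Transform;
const util = require('util');

util.inherits(LineSplitter, Transform);

function LineSplitter(options) {
  Transform.call(this, options);
  this._partial = '';
}

LineSplitter.prototype._transform = function(chunk, encoding, callback) {
  const lines = (this._partial + chunk.toString()).split('\n');
  this._partial = lines.pop();
  lines.forEach((line) => this.push(line + '\n'));
  callback();
};

LineSplitter.prototype._flush = function(callback) {
  // Emit whatever is left over before 'end' is signaled.
  if (this._partial) {
    this.push(this._partial + '\n');
  }
  callback();
};
```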
#### transform.\_transform(chunk, encoding, callback)
-* `chunk` {Buffer | String} The chunk to be transformed. Will **always**
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
be a buffer unless the `decodeStrings` option was set to `false`.
* `encoding` {String} If the chunk is a string, then this is the
encoding type. If chunk is a buffer, then this is the special
@@ -1165,12 +1184,12 @@ Note: **This function MUST NOT be called directly.** It should be
implemented by child classes, and called by the internal Transform
class methods only.
-All Transform stream implementations must provide a `_transform`
+All Transform stream implementations must provide a `_transform()`
method to accept input and produce output.
-`_transform` should do whatever has to be done in this specific
+`_transform()` should do whatever has to be done in this specific
Transform class, to handle the bytes being written, and pass them off
-to the readable portion of the interface. Do asynchronous I/O,
+to the readable portion of the interface. Do asynchronous I/O,
process things, and so on.
Call `transform.push(outputChunk)` 0 or more times to generate output
@@ -1178,12 +1197,12 @@ from this input chunk, depending on how much data you want to output
as a result of this chunk.
Call the callback function only when the current chunk is completely
-consumed. Note that there may or may not be output as a result of any
+consumed. Note that there may or may not be output as a result of any
particular input chunk. If you supply a second argument to the callback
it will be passed to the push method. In other words the following are
equivalent:
-```javascript
+```js
transform.prototype._transform = function (data, encoding, callback) {
this.push(data);
callback();
@@ -1196,22 +1215,23 @@ transform.prototype._transform = function (data, encoding, callback) {
This method is prefixed with an underscore because it is internal to
the class that defines it, and should not be called directly by user
-programs. However, you **are** expected to override this method in
+programs. However, you **are** expected to override this method in
your own extension classes.
#### Example: `SimpleProtocol` parser v2
-The example above of a simple protocol parser can be implemented
-simply by using the higher level [Transform][] stream class, similar to
-the `parseHeader` and `SimpleProtocol v1` examples above.
+The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple
+protocol parser can be implemented simply by using the higher level
+[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol
+v1` examples.
In this example, rather than providing the input as an argument, it
would be piped into the parser, which is a more idiomatic Node.js stream
approach.
```javascript
-var util = require('util');
-var Transform = require('stream').Transform;
+const util = require('util');
+const Transform = require('stream').Transform;
util.inherits(SimpleProtocol, Transform);
function SimpleProtocol(options) {
@@ -1282,22 +1302,30 @@ SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
`stream.Writable` is an abstract class designed to be extended with an
-underlying implementation of the [`_write(chunk, encoding, callback)`][] method.
+underlying implementation of the
+[`stream._write(chunk, encoding, callback)`][stream-_write] method.
-Please see above under [API for Stream Consumers][] for how to consume
-writable streams in your programs. What follows is an explanation of
+Please see [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
how to implement Writable streams in your programs.
#### new stream.Writable([options])
* `options` {Object}
- * `highWaterMark` {Number} Buffer level when [`write()`][] starts
- returning false. Default=16kb, or 16 for `objectMode` streams
+ * `highWaterMark` {Number} Buffer level when
+ [`stream.write()`][stream-write] starts returning `false`. Default = `16384`
+ (16KB), or `16` for `objectMode` streams.
* `decodeStrings` {Boolean} Whether or not to decode strings into
- Buffers before passing them to [`_write()`][]. Default=true
- * `objectMode` {Boolean} Whether or not the `write(anyObj)` is
- a valid operation. If set you can write arbitrary data instead
- of only `Buffer` / `String` data. Default=false
+ Buffers before passing them to [`stream._write()`][stream-_write].
+ Default = `true`
+ * `objectMode` {Boolean} Whether or not
+ [`stream.write(anyObj)`][stream-write] is a valid operation. If set, you can
+ write arbitrary data instead of only `Buffer` / `String` data.
+ Default = `false`
+ * `write` {Function} Implementation for the
+ [`stream._write()`][stream-_write] method.
+ * `writev` {Function} Implementation for the
+ [`stream._writev()`][stream-_writev] method.
In classes that extend the Writable class, make sure to call the
constructor so that the buffering settings can be properly
@@ -1305,7 +1333,7 @@ initialized.
#### writable.\_write(chunk, encoding, callback)
-* `chunk` {Buffer | String} The chunk to be written. Will **always**
+* `chunk` {Buffer|String} The chunk to be written. Will **always**
be a buffer unless the `decodeStrings` option was set to `false`.
* `encoding` {String} If the chunk is a string, then this is the
encoding type. If chunk is a buffer, then this is the special
@@ -1313,8 +1341,9 @@ initialized.
* `callback` {Function} Call this function (optionally with an error
argument) when you are done processing the supplied chunk.
-All Writable stream implementations must provide a [`_write()`][]
-method to send data to the underlying resource.
+All Writable stream implementations must provide a
+[`stream._write()`][stream-_write] method to send data to the underlying
+resource.
Note: **This function MUST NOT be called directly.** It should be
implemented by child classes, and called by the internal Writable
@@ -1325,20 +1354,20 @@ signal that the write completed successfully or with an error.
If the `decodeStrings` flag is set in the constructor options, then
`chunk` may be a string rather than a Buffer, and `encoding` will
-indicate the sort of string that it is. This is to support
+indicate the sort of string that it is. This is to support
implementations that have an optimized handling for certain string
-data encodings. If you do not explicitly set the `decodeStrings`
+data encodings. If you do not explicitly set the `decodeStrings`
option to `false`, then you can safely ignore the `encoding` argument,
and assume that `chunk` will always be a Buffer.
This method is prefixed with an underscore because it is internal to
the class that defines it, and should not be called directly by user
-programs. However, you **are** expected to override this method in
+programs. However, you **are** expected to override this method in
your own extension classes.
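
A minimal sketch of the `decodeStrings: false` case described above, using a
hypothetical `StringSink` class:

```js
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(StringSink, Writable);

function StringSink() {
  // With decodeStrings: false, _write() may receive strings as-is.
  Writable.call(this, { decodeStrings: false });
}

StringSink.prototype._write = function(chunk, encoding, callback) {
  if (typeof chunk === 'string') {
    console.log('got a %s string of length %d', encoding, chunk.length);
  } else {
    console.log('got a Buffer of length %d', chunk.length);
  }
  callback();
};

new StringSink().write('hello', 'utf8');
```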
#### writable.\_writev(chunks, callback)
-* `chunks` {Array} The chunks to be written. Each chunk has following
+* `chunks` {Array} The chunks to be written. Each chunk has following
format: `{ chunk: ..., encoding: ... }`.
* `callback` {Function} Call this function (optionally with an error
argument) when you are done processing the supplied chunks.
@@ -1348,7 +1377,7 @@ implemented by child classes, and called by the internal Writable
class methods only.
This function is completely optional to implement. In most cases it is
-unnecessary. If implemented, it will be called with all the chunks
+unnecessary. If implemented, it will be called with all the chunks
that are buffered in the write queue.
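
A minimal sketch of a `_writev()` implementation, supplied here through the
constructor options and exercised with `cork()`/`uncork()` so that several
chunks end up in the write queue (purely illustrative):

```js
const Writable = require('stream').Writable;

const sink = new Writable({
  write: function(chunk, encoding, callback) {
    console.log('single write of %d bytes', chunk.length);
    callback();
  },
  writev: function(chunks, callback) {
    console.log('batched write of %d chunks', chunks.length);
    callback();
  }
});

sink.cork();
sink.write('a');
sink.write('b');
sink.write('c');
process.nextTick(() => sink.uncork()); // the buffered chunks reach writev()
```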
@@ -1356,14 +1385,16 @@ that are buffered in the write queue.
-In simple cases there is now the added benefit of being able to construct a stream without inheritance.
+In simple cases there is now the added benefit of being able to construct a
+stream without inheritance.
This can be done by passing the appropriate methods as constructor options:
Examples:
### Duplex
-```javascript
+
+```js
var duplex = new stream.Duplex({
read: function(n) {
// sets this._read under the hood
@@ -1400,7 +1431,8 @@ var duplex = new stream.Duplex({
```
### Readable
-```javascript
+
+```js
var readable = new stream.Readable({
read: function(n) {
// sets this._read under the hood
@@ -1413,7 +1445,8 @@ var readable = new stream.Readable({
```
### Transform
-```javascript
+
+```js
var transform = new stream.Transform({
transform: function(chunk, encoding, next) {
// sets this._transform under the hood
@@ -1436,7 +1469,8 @@ var transform = new stream.Transform({
```
### Writable
-```javascript
+
+```js
var writable = new stream.Writable({
write: function(chunk, encoding, next) {
// sets this._write under the hood
@@ -1474,14 +1508,14 @@ The amount of data that will potentially be buffered depends on the
`highWaterMark` option which is passed into the constructor.
Buffering in Readable streams happens when the implementation calls
-[`stream.push(chunk)`][]. If the consumer of the Stream does not call
-`stream.read()`, then the data will sit in the internal queue until it
-is consumed.
+[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not
+call [`stream.read()`][stream-read], then the data will sit in the internal
+queue until it is consumed.
Buffering in Writable streams happens when the user calls
-[`stream.write(chunk)`][] repeatedly, even when `write()` returns `false`.
+[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`.
-The purpose of streams, especially with the `pipe()` method, is to
+The purpose of streams, especially with the [`stream.pipe()`][] method, is to
limit the buffering of data to acceptable levels, so that sources and
destinations of varying speed will not overwhelm the available memory.
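
A minimal sketch of that idea, assuming placeholder file names:

```js
const fs = require('fs');
const zlib = require('zlib');

// pipe() handles back-pressure: the fast file read is throttled to the
// pace of the gzip and write destinations.
fs.createReadStream('input.txt')
  .pipe(zlib.createGzip())
  .pipe(fs.createWriteStream('input.txt.gz'));
```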
@@ -1492,36 +1526,37 @@ destinations of varying speed will not overwhelm the available memory.
In versions of Node.js prior to v0.10, the Readable stream interface was
simpler, but also less powerful and less useful.
-* Rather than waiting for you to call the `read()` method, `'data'`
- events would start emitting immediately. If you needed to do some
- I/O to decide how to handle data, then you had to store the chunks
+* Rather than waiting for you to call the [`stream.read()`][stream-read] method,
+ [`'data'`][] events would start emitting immediately. If you needed to do
+ some I/O to decide how to handle data, then you had to store the chunks
in some kind of buffer so that they would not be lost.
-* The [`pause()`][] method was advisory, rather than guaranteed. This
- meant that you still had to be prepared to receive `'data'` events
- even when the stream was in a paused state.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+ guaranteed. This meant that you still had to be prepared to receive
+ [`'data'`][] events even when the stream was in a paused state.
-In Node.js v0.10, the Readable class described below was added.
+In Node.js v0.10, the [Readable][] class was added.
For backwards compatibility with older Node.js programs, Readable streams
-switch into "flowing mode" when a `'data'` event handler is added, or
-when the [`resume()`][] method is called. The effect is that, even if
-you are not using the new `read()` method and `'readable'` event, you
-no longer have to worry about losing `'data'` chunks.
+switch into "flowing mode" when a [`'data'`][] event handler is added, or
+when the [`stream.resume()`][stream-resume] method is called. The effect is
+that, even if you are not using the new [`stream.read()`][stream-read] method
+and [`'readable'`][] event, you no longer have to worry about losing
+[`'data'`][] chunks.
-Most programs will continue to function normally. However, this
+Most programs will continue to function normally. However, this
introduces an edge case in the following conditions:
-* No [`'data'` event][] handler is added.
-* The [`resume()`][] method is never called.
+* No [`'data'`][] event handler is added.
+* The [`stream.resume()`][stream-resume] method is never called.
* The stream is not piped to any writable destination.
For example, consider the following code:
-```javascript
+```js
// WARNING! BROKEN!
-net.createServer(function(socket) {
+net.createServer((socket) => {
// we add an 'end' method, but never consume the data
- socket.on('end', function() {
+ socket.on('end', () => {
// It will never get here.
socket.end('I got your message (but didnt read it)\n');
});
@@ -1530,17 +1565,17 @@ net.createServer(function(socket) {
```
In versions of Node.js prior to v0.10, the incoming message data would be
-simply discarded. However, in Node.js v0.10 and beyond,
+simply discarded. However, in Node.js v0.10 and beyond,
the socket will remain paused forever.
-The workaround in this situation is to call the `resume()` method to
-start the flow of data:
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to start the flow of data:
-```javascript
+```js
// Workaround
-net.createServer(function(socket) {
+net.createServer((socket) => {
- socket.on('end', function() {
+ socket.on('end', () => {
socket.end('I got your message (but didnt read it)\n');
});
@@ -1552,7 +1587,7 @@ net.createServer(function(socket) {
In addition to new Readable streams switching into flowing mode,
pre-v0.10 style streams can be wrapped in a Readable class using the
-`wrap()` method.
+[`stream.wrap()`][] method.
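
A minimal sketch of wrapping such a stream, where `getOldStyleStreamSomehow()`
is a placeholder in the spirit of the other examples:

```js
const Readable = require('stream').Readable;

var oldStream = getOldStyleStreamSomehow(); // pre-v0.10 style stream
var wrapped = new Readable().wrap(oldStream);

wrapped.on('readable', () => {
  var chunk;
  while ((chunk = wrapped.read()) !== null) {
    console.log('got %d bytes of data', chunk.length);
  }
});
```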
### Object Mode
@@ -1565,33 +1600,33 @@ Streams that are in **object mode** can emit generic JavaScript values
other than Buffers and Strings.
A Readable stream in object mode will always return a single item from
-a call to `stream.read(size)`, regardless of what the size argument
-is.
+a call to [`stream.read(size)`][stream-read], regardless of what the size
+argument is.
A Writable stream in object mode will always ignore the `encoding`
-argument to `stream.write(data, encoding)`.
+argument to [`stream.write(data, encoding)`][stream-write].
The special value `null` still retains its special value for object
-mode streams. That is, for object mode readable streams, `null` as a
-return value from `stream.read()` indicates that there is no more
-data, and [`stream.push(null)`][] will signal the end of stream data
+mode streams. That is, for object mode readable streams, `null` as a
+return value from [`stream.read()`][stream-read] indicates that there is no more
+data, and [`stream.push(null)`][stream-push] will signal the end of stream data
(`EOF`).
-No streams in Node.js core are object mode streams. This pattern is only
+No streams in Node.js core are object mode streams. This pattern is only
used by userland streaming libraries.
You should set `objectMode` in your stream child class constructor on
-the options object. Setting `objectMode` mid-stream is not safe.
+the options object. Setting `objectMode` mid-stream is not safe.
For Duplex streams `objectMode` can be set exclusively for readable or
writable side with `readableObjectMode` and `writableObjectMode`
respectively. These options can be used to implement parsers and
serializers with Transform streams.
-```javascript
-var util = require('util');
-var StringDecoder = require('string_decoder').StringDecoder;
-var Transform = require('stream').Transform;
+```js
+const util = require('util');
+const StringDecoder = require('string_decoder').StringDecoder;
+const Transform = require('stream').Transform;
util.inherits(JSONParseStream, Transform);
// Gets \n-delimited JSON string data, and emits the parsed objects
@@ -1646,12 +1681,12 @@ JSONParseStream.prototype._flush = function(cb) {
There are some cases where you want to trigger a refresh of the
underlying readable stream mechanisms, without actually consuming any
-data. In that case, you can call `stream.read(0)`, which will always
+data. In that case, you can call `stream.read(0)`, which will always
return null.
If the internal read buffer is below the `highWaterMark`, and the
-stream is not currently reading, then calling `read(0)` will trigger
-a low-level `_read` call.
+stream is not currently reading, then calling `stream.read(0)` will trigger
+a low-level [`stream._read()`][stream-_read] call.
There is almost never a need to do this. However, you will see some
cases in Node.js's internals where this is done, particularly in the
@@ -1660,71 +1695,66 @@ Readable stream class internals.
### `stream.push('')`
Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
-interesting side effect. Because it *is* a call to
-[`stream.push()`][], it will end the `reading` process. However, it
+interesting side effect. Because it *is* a call to
+[`stream.push()`][stream-push], it will end the `reading` process. However, it
does *not* add any data to the readable buffer, so there's nothing for
a user to consume.
Very rarely, there are cases where you have no data to provide now,
but the consumer of your stream (or, perhaps, another bit of your own
-code) will know when to check again, by calling `stream.read(0)`. In
-those cases, you *may* call `stream.push('')`.
+code) will know when to check again, by calling [`stream.read(0)`][stream-read].
+In those cases, you *may* call `stream.push('')`.
So far, the only use case for this functionality is in the
-[tls.CryptoStream][] class, which is deprecated in Node.js/io.js v1.0. If you
+[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. If you
find that you have to use `stream.push('')`, please consider another
approach, because it almost certainly indicates that something is
horribly wrong.
-[request to an HTTP server]: https://nodejs.org/docs/v5.1.0/api/http.html#http_http_incomingmessage
-[EventEmitter]: https://nodejs.org/docs/v5.1.0/api/events.html#events_class_events_eventemitter
-[Object mode]: #stream_object_mode
-[`stream.push(chunk)`]: #stream_readable_push_chunk_encoding
-[`stream.push(null)`]: #stream_readable_push_chunk_encoding
-[`stream.push()`]: #stream_readable_push_chunk_encoding
-[`unpipe()`]: #stream_readable_unpipe_destination
-[unpiped]: #stream_readable_unpipe_destination
-[tcp sockets]: https://nodejs.org/docs/v5.1.0/api/net.html#net_class_net_socket
-[http responses, on the client]: https://nodejs.org/docs/v5.1.0/api/http.html#http_http_incomingmessage
-[http requests, on the server]: https://nodejs.org/docs/v5.1.0/api/http.html#http_http_incomingmessage
-[http requests, on the client]: https://nodejs.org/docs/v5.1.0/api/http.html#http_class_http_clientrequest
-[http responses, on the server]: https://nodejs.org/docs/v5.1.0/api/http.html#http_class_http_serverresponse
-[fs read streams]: https://nodejs.org/docs/v5.1.0/api/fs.html#fs_class_fs_readstream
-[fs write streams]: https://nodejs.org/docs/v5.1.0/api/fs.html#fs_class_fs_writestream
-[zlib streams]: zlib.html
-[zlib]: zlib.html
-[crypto streams]: crypto.html
-[crypto]: crypto.html
-[tls.CryptoStream]: https://nodejs.org/docs/v5.1.0/api/tls.html#tls_class_cryptostream
-[process.stdin]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdin
-[stdout]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdout
-[process.stdout]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdout
-[process.stderr]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stderr
-[child process stdout and stderr]: https://nodejs.org/docs/v5.1.0/api/child_process.html#child_process_child_stdout
-[child process stdin]: https://nodejs.org/docs/v5.1.0/api/child_process.html#child_process_child_stdin
+[`'data'`]: #stream_event_data
+[`'drain'`]: #stream_event_drain
+[`'end'`]: #stream_event_end
+[`'finish'`]: #stream_event_finish
+[`'readable'`]: #stream_event_readable
+[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end
+[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter
+[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr
+[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin
+[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout
+[`stream.cork()`]: #stream_writable_cork
+[`stream.pipe()`]: #stream_readable_pipe_destination_options
+[`stream.uncork()`]: #stream_writable_uncork
+[`stream.unpipe()`]: #stream_readable_unpipe_destination
+[`stream.wrap()`]: #stream_readable_wrap_stream
+[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream
+[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor
[API for Stream Consumers]: #stream_api_for_stream_consumers
[API for Stream Implementors]: #stream_api_for_stream_implementors
-[Readable]: #stream_class_stream_readable
-[Writable]: #stream_class_stream_writable
+[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin
+[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout
+[Compatibility]: #stream_compatibility_with_older_node_js_versions
+[crypto]: crypto.html
[Duplex]: #stream_class_stream_duplex
+[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream
+[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream
+[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest
+[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse
+[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage
+[Object mode]: #stream_object_mode
+[Readable]: #stream_class_stream_readable
+[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
+[stream-_flush]: #stream_transform_flush_callback
+[stream-_read]: #stream_readable_read_size_1
+[stream-_transform]: #stream_transform_transform_chunk_encoding_callback
+[stream-_write]: #stream_writable_write_chunk_encoding_callback_1
+[stream-_writev]: #stream_writable_writev_chunks_callback
+[stream-end]: #stream_writable_end_chunk_encoding_callback
+[stream-pause]: #stream_readable_pause
+[stream-push]: #stream_readable_push_chunk_encoding
+[stream-read]: #stream_readable_read_size
+[stream-resume]: #stream_readable_resume
+[stream-write]: #stream_writable_write_chunk_encoding_callback
+[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket
[Transform]: #stream_class_stream_transform
-[`end`]: #stream_event_end
-[`finish`]: #stream_event_finish
-[`_read(size)`]: #stream_readable_read_size_1
-[`_read()`]: #stream_readable_read_size_1
-[_read]: #stream_readable_read_size_1
-[`writable.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
-[`write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback
-[`write()`]: #stream_writable_write_chunk_encoding_callback
-[`stream.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
-[`_write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback_1
-[`_write()`]: #stream_writable_write_chunk_encoding_callback_1
-[_write]: #stream_writable_write_chunk_encoding_callback_1
-[`util.inherits`]: https://nodejs.org/docs/v5.1.0/api/util.html#util_util_inherits_constructor_superconstructor
-[`end()`]: #stream_writable_end_chunk_encoding_callback
-[`'data'` event]: #stream_event_data
-[`resume()`]: #stream_readable_resume
-[`readable.resume()`]: #stream_readable_resume
-[`pause()`]: #stream_readable_pause
-[`unpipe()`]: #stream_readable_unpipe_destination
-[`pipe()`]: #stream_readable_pipe_destination_options
+[Writable]: #stream_class_stream_writable
+[zlib]: zlib.html
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
index 69558af037cd66..736693b8400fed 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
@@ -6,22 +6,21 @@
'use strict';
/**/
+
var objectKeys = Object.keys || function (obj) {
var keys = [];
- for (var key in obj) keys.push(key);
- return keys;
-}
+ for (var key in obj) {
+ keys.push(key);
+ }return keys;
+};
/* */
-
module.exports = Duplex;
/**/
var processNextTick = require('process-nextick-args');
/* */
-
-
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
@@ -35,26 +34,21 @@ util.inherits(Duplex, Readable);
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
- if (!Duplex.prototype[method])
- Duplex.prototype[method] = Writable.prototype[method];
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
function Duplex(options) {
- if (!(this instanceof Duplex))
- return new Duplex(options);
+ if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
- if (options && options.readable === false)
- this.readable = false;
+ if (options && options.readable === false) this.readable = false;
- if (options && options.writable === false)
- this.writable = false;
+ if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
- if (options && options.allowHalfOpen === false)
- this.allowHalfOpen = false;
+ if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
@@ -63,8 +57,7 @@ function Duplex(options) {
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
- if (this.allowHalfOpen || this._writableState.ended)
- return;
+ if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
@@ -75,8 +68,8 @@ function onEndNT(self) {
self.end();
}
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
index bddfdd01537a40..d06f71f1868d77 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js
@@ -16,12 +16,11 @@ util.inherits = require('inherits');
util.inherits(PassThrough, Transform);
function PassThrough(options) {
- if (!(this instanceof PassThrough))
- return new PassThrough(options);
+ if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
-PassThrough.prototype._transform = function(chunk, encoding, cb) {
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
-};
+};
\ No newline at end of file
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
index 50852aee7e6e02..54a9d5c553d69e 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js
@@ -6,12 +6,10 @@ module.exports = Readable;
var processNextTick = require('process-nextick-args');
/* */
-
/**/
var isArray = require('isarray');
/* */
-
/**/
var Buffer = require('buffer').Buffer;
/* */
@@ -21,21 +19,20 @@ Readable.ReadableState = ReadableState;
var EE = require('events');
/**/
-var EElistenerCount = function(emitter, type) {
+var EElistenerCount = function (emitter, type) {
return emitter.listeners(type).length;
};
/* */
-
-
/**/
var Stream;
-(function (){try{
- Stream = require('st' + 'ream');
-}catch(_){}finally{
- if (!Stream)
- Stream = require('events').EventEmitter;
-}}())
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
/* */
var Buffer = require('buffer').Buffer;
@@ -45,11 +42,9 @@ var util = require('core-util-is');
util.inherits = require('inherits');
/* */
-
-
/**/
var debugUtil = require('util');
-var debug;
+var debug = undefined;
if (debugUtil && debugUtil.debuglog) {
debug = debugUtil.debuglog('stream');
} else {
@@ -71,17 +66,16 @@ function ReadableState(options, stream) {
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.readableObjectMode;
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.buffer = [];
this.length = 0;
@@ -103,6 +97,7 @@ function ReadableState(options, stream) {
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
+ this.resumeScheduled = false;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
@@ -122,8 +117,7 @@ function ReadableState(options, stream) {
this.decoder = null;
this.encoding = null;
if (options.encoding) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
@@ -133,16 +127,14 @@ var Duplex;
function Readable(options) {
Duplex = Duplex || require('./_stream_duplex');
- if (!(this instanceof Readable))
- return new Readable(options);
+ if (!(this instanceof Readable)) return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
- if (options && typeof options.read === 'function')
- this._read = options.read;
+ if (options && typeof options.read === 'function') this._read = options.read;
Stream.call(this);
}
@@ -151,7 +143,7 @@ function Readable(options) {
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
-Readable.prototype.push = function(chunk, encoding) {
+Readable.prototype.push = function (chunk, encoding) {
var state = this._readableState;
if (!state.objectMode && typeof chunk === 'string') {
@@ -166,12 +158,12 @@ Readable.prototype.push = function(chunk, encoding) {
};
// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function(chunk) {
+Readable.prototype.unshift = function (chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
-Readable.prototype.isPaused = function() {
+Readable.prototype.isPaused = function () {
return this._readableState.flowing === false;
};
@@ -190,26 +182,28 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
- if (state.decoder && !addToFront && !encoding)
+ var skipAdd;
+ if (state.decoder && !addToFront && !encoding) {
chunk = state.decoder.write(chunk);
+ skipAdd = !state.objectMode && chunk.length === 0;
+ }
- if (!addToFront)
- state.reading = false;
-
- // if we want the data now, just emit it.
- if (state.flowing && state.length === 0 && !state.sync) {
- stream.emit('data', chunk);
- stream.read(0);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront)
- state.buffer.unshift(chunk);
- else
- state.buffer.push(chunk);
-
- if (state.needReadable)
- emitReadable(stream);
+ if (!addToFront) state.reading = false;
+
+ // Don't add to the buffer if we've decoded to an empty string chunk and
+ // we're not in object mode
+ if (!skipAdd) {
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+
+ if (state.needReadable) emitReadable(stream);
+ }
}
maybeReadMore(stream, state);
@@ -221,7 +215,6 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
return needMoreData(state);
}
-
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
@@ -230,16 +223,12 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
- return !state.ended &&
- (state.needReadable ||
- state.length < state.highWaterMark ||
- state.length === 0);
+ return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}
// backwards compatibility.
-Readable.prototype.setEncoding = function(enc) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+Readable.prototype.setEncoding = function (enc) {
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
return this;
@@ -264,29 +253,22 @@ function computeNewHighWaterMark(n) {
}
function howMuchToRead(n, state) {
- if (state.length === 0 && state.ended)
- return 0;
+ if (state.length === 0 && state.ended) return 0;
- if (state.objectMode)
- return n === 0 ? 0 : 1;
+ if (state.objectMode) return n === 0 ? 0 : 1;
if (n === null || isNaN(n)) {
// only flow one buffer at a time
- if (state.flowing && state.buffer.length)
- return state.buffer[0].length;
- else
- return state.length;
+ if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length;
}
- if (n <= 0)
- return 0;
+ if (n <= 0) return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
- if (n > state.highWaterMark)
- state.highWaterMark = computeNewHighWaterMark(n);
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
@@ -302,25 +284,19 @@ function howMuchToRead(n, state) {
}
// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function(n) {
+Readable.prototype.read = function (n) {
debug('read', n);
var state = this._readableState;
var nOrig = n;
- if (typeof n !== 'number' || n > 0)
- state.emittedReadable = false;
+ if (typeof n !== 'number' || n > 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
- if (n === 0 &&
- state.needReadable &&
- (state.length >= state.highWaterMark || state.ended)) {
+ if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended)
- endReadable(this);
- else
- emitReadable(this);
+ if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
return null;
}
@@ -328,8 +304,7 @@ Readable.prototype.read = function(n) {
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
- if (state.length === 0)
- endReadable(this);
+ if (state.length === 0) endReadable(this);
return null;
}
@@ -377,8 +352,7 @@ Readable.prototype.read = function(n) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
- if (state.length === 0)
- state.needReadable = true;
+ if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
@@ -386,14 +360,10 @@ Readable.prototype.read = function(n) {
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
- if (doRead && !state.reading)
- n = howMuchToRead(nOrig, state);
+ if (doRead && !state.reading) n = howMuchToRead(nOrig, state);
var ret;
- if (n > 0)
- ret = fromList(n, state);
- else
- ret = null;
+ if (n > 0) ret = fromList(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
@@ -404,32 +374,24 @@ Readable.prototype.read = function(n) {
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
- if (state.length === 0 && !state.ended)
- state.needReadable = true;
+ if (state.length === 0 && !state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
- if (nOrig !== n && state.ended && state.length === 0)
- endReadable(this);
+ if (nOrig !== n && state.ended && state.length === 0) endReadable(this);
- if (ret !== null)
- this.emit('data', ret);
+ if (ret !== null) this.emit('data', ret);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
- if (!(Buffer.isBuffer(chunk)) &&
- typeof chunk !== 'string' &&
- chunk !== null &&
- chunk !== undefined &&
- !state.objectMode) {
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
-
function onEofChunk(stream, state) {
if (state.ended) return;
if (state.decoder) {
@@ -454,10 +416,7 @@ function emitReadable(stream) {
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
- if (state.sync)
- processNextTick(emitReadable_, stream);
- else
- emitReadable_(stream);
+ if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream);
}
}
@@ -467,7 +426,6 @@ function emitReadable_(stream) {
flow(stream);
}
-
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
@@ -483,15 +441,12 @@ function maybeReadMore(stream, state) {
function maybeReadMore_(stream, state) {
var len = state.length;
- while (!state.reading && !state.flowing && !state.ended &&
- state.length < state.highWaterMark) {
+ while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
- break;
- else
- len = state.length;
+ break;else len = state.length;
}
state.readingMore = false;
}
@@ -500,11 +455,11 @@ function maybeReadMore_(stream, state) {
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function(n) {
+Readable.prototype._read = function (n) {
this.emit('error', new Error('not implemented'));
};
-Readable.prototype.pipe = function(dest, pipeOpts) {
+Readable.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
@@ -522,15 +477,10 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
state.pipesCount += 1;
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
- var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
- dest !== process.stdout &&
- dest !== process.stderr;
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
- if (state.endEmitted)
- processNextTick(endFn);
- else
- src.once('end', endFn);
+ if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
@@ -572,9 +522,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
- if (state.awaitDrain &&
- (!dest._writableState || dest._writableState.needDrain))
- ondrain();
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
src.on('data', ondata);
@@ -585,10 +533,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
- if (state.pipesCount === 1 &&
- state.pipes[0] === dest &&
- src.listenerCount('data') === 1 &&
- !cleanedUp) {
+ if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) {
debug('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
}
@@ -602,18 +547,11 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
- if (EElistenerCount(dest, 'error') === 0)
- dest.emit('error', er);
+ if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
- if (!dest._events || !dest._events.error)
- dest.on('error', onerror);
- else if (isArray(dest._events.error))
- dest._events.error.unshift(onerror);
- else
- dest._events.error = [onerror, dest._events.error];
-
+ if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
@@ -646,11 +584,10 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
};
function pipeOnDrain(src) {
- return function() {
+ return function () {
var state = src._readableState;
debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain)
- state.awaitDrain--;
+ if (state.awaitDrain) state.awaitDrain--;
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
state.flowing = true;
flow(src);
@@ -658,29 +595,24 @@ function pipeOnDrain(src) {
};
}
-
-Readable.prototype.unpipe = function(dest) {
+Readable.prototype.unpipe = function (dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
- if (state.pipesCount === 0)
- return this;
+ if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
- if (dest && dest !== state.pipes)
- return this;
+ if (dest && dest !== state.pipes) return this;
- if (!dest)
- dest = state.pipes;
+ if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
- if (dest)
- dest.emit('unpipe', this);
+ if (dest) dest.emit('unpipe', this);
return this;
}
@@ -694,20 +626,18 @@ Readable.prototype.unpipe = function(dest) {
state.pipesCount = 0;
state.flowing = false;
- for (var i = 0; i < len; i++)
- dests[i].emit('unpipe', this);
- return this;
+ for (var _i = 0; _i < len; _i++) {
+ dests[_i].emit('unpipe', this);
+ }return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
- if (i === -1)
- return this;
+ if (i === -1) return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
- if (state.pipesCount === 1)
- state.pipes = state.pipes[0];
+ if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this);
@@ -716,7 +646,7 @@ Readable.prototype.unpipe = function(dest) {
// set up data events if they are asked for
// Ensure readable listeners eventually get something
-Readable.prototype.on = function(ev, fn) {
+Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
// If listening to data, and it has not explicitly been paused,
@@ -725,7 +655,7 @@ Readable.prototype.on = function(ev, fn) {
this.resume();
}
- if (ev === 'readable' && this.readable) {
+ if (ev === 'readable' && !this._readableState.endEmitted) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
@@ -750,7 +680,7 @@ function nReadingNextTick(self) {
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function() {
+Readable.prototype.resume = function () {
var state = this._readableState;
if (!state.flowing) {
debug('resume');
@@ -776,11 +706,10 @@ function resume_(stream, state) {
state.resumeScheduled = false;
stream.emit('resume');
flow(stream);
- if (state.flowing && !state.reading)
- stream.read(0);
+ if (state.flowing && !state.reading) stream.read(0);
}
-Readable.prototype.pause = function() {
+Readable.prototype.pause = function () {
debug('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug('pause');
@@ -803,32 +732,27 @@ function flow(stream) {
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function(stream) {
+Readable.prototype.wrap = function (stream) {
var state = this._readableState;
var paused = false;
var self = this;
- stream.on('end', function() {
+ stream.on('end', function () {
debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
- if (chunk && chunk.length)
- self.push(chunk);
+ if (chunk && chunk.length) self.push(chunk);
}
self.push(null);
});
- stream.on('data', function(chunk) {
+ stream.on('data', function (chunk) {
debug('wrapped data');
- if (state.decoder)
- chunk = state.decoder.write(chunk);
+ if (state.decoder) chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
- if (state.objectMode && (chunk === null || chunk === undefined))
- return;
- else if (!state.objectMode && (!chunk || !chunk.length))
- return;
+ if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = self.push(chunk);
if (!ret) {
@@ -841,21 +765,23 @@ Readable.prototype.wrap = function(stream) {
// important when wrapping filters and duplexes.
for (var i in stream) {
if (this[i] === undefined && typeof stream[i] === 'function') {
- this[i] = function(method) { return function() {
- return stream[method].apply(stream, arguments);
- }; }(i);
+ this[i] = function (method) {
+ return function () {
+ return stream[method].apply(stream, arguments);
+ };
+ }(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
- forEach(events, function(ev) {
+ forEach(events, function (ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
- self._read = function(n) {
+ self._read = function (n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
@@ -866,7 +792,6 @@ Readable.prototype.wrap = function(stream) {
return self;
};
-
// exposed for testing purposes only.
Readable._fromList = fromList;
@@ -880,21 +805,11 @@ function fromList(n, state) {
var ret;
// nothing in the list, definitely empty.
- if (list.length === 0)
- return null;
+ if (list.length === 0) return null;
- if (length === 0)
- ret = null;
- else if (objectMode)
- ret = list.shift();
- else if (!n || n >= length) {
+ if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) {
// read it all, truncate the array.
- if (stringMode)
- ret = list.join('');
- else if (list.length === 1)
- ret = list[0];
- else
- ret = Buffer.concat(list, length);
+ if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
@@ -910,25 +825,16 @@ function fromList(n, state) {
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
- if (stringMode)
- ret = '';
- else
- ret = new Buffer(n);
+ if (stringMode) ret = '';else ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
- if (stringMode)
- ret += buf.slice(0, cpy);
- else
- buf.copy(ret, c, 0, cpy);
+ if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy);
- if (cpy < buf.length)
- list[0] = buf.slice(cpy);
- else
- list.shift();
+ if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift();
c += cpy;
}
@@ -943,8 +849,7 @@ function endReadable(stream) {
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
- if (state.length > 0)
- throw new Error('endReadable called on non-empty stream');
+ if (state.length > 0) throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
@@ -961,15 +866,15 @@ function endReadableNT(state, stream) {
}
}
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
-function indexOf (xs, x) {
+function indexOf(xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
-}
+}
\ No newline at end of file
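The hunks above are largely a mechanical (Babel-style) reflow of `_stream_readable.js`, plus the switch from `this.readable` to `!this._readableState.endEmitted` when a `'readable'` listener is attached. A minimal usage sketch of the API they touch, using Node's core `stream` module as a stand-in for this vendored copy (not part of the patch), might look like:

```js
// Sketch only: exercises setEncoding/read()/'readable' as reformatted above.
var Readable = require('stream').Readable;

var r = new Readable();
r._read = function () {};        // no-op pull; data is pushed manually below

r.setEncoding('utf8');           // lazily loads StringDecoder, as in the hunk above
r.push('hello ');
r.push('world');
r.push(null);                    // EOF; 'end' fires once the buffer drains

r.on('readable', function () {
  var chunk;
  while ((chunk = r.read()) !== null) {
    console.log('got %j', chunk);
  }
});
r.on('end', function () {
  console.log('done');
});
```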
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
index 3675d18d915610..625cdc17698059 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js
@@ -53,9 +53,8 @@ util.inherits = require('inherits');
util.inherits(Transform, Duplex);
-
function TransformState(stream) {
- this.afterTransform = function(er, data) {
+ this.afterTransform = function (er, data) {
return afterTransform(stream, er, data);
};
@@ -63,6 +62,7 @@ function TransformState(stream) {
this.transforming = false;
this.writecb = null;
this.writechunk = null;
+ this.writeencoding = null;
}
function afterTransform(stream, er, data) {
@@ -71,17 +71,14 @@ function afterTransform(stream, er, data) {
var cb = ts.writecb;
- if (!cb)
- return stream.emit('error', new Error('no writecb in Transform class'));
+ if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
- if (data !== null && data !== undefined)
- stream.push(data);
+ if (data !== null && data !== undefined) stream.push(data);
- if (cb)
- cb(er);
+ cb(er);
var rs = stream._readableState;
rs.reading = false;
@@ -90,10 +87,8 @@ function afterTransform(stream, er, data) {
}
}
-
function Transform(options) {
- if (!(this instanceof Transform))
- return new Transform(options);
+ if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
@@ -111,24 +106,19 @@ function Transform(options) {
this._readableState.sync = false;
if (options) {
- if (typeof options.transform === 'function')
- this._transform = options.transform;
+ if (typeof options.transform === 'function') this._transform = options.transform;
- if (typeof options.flush === 'function')
- this._flush = options.flush;
+ if (typeof options.flush === 'function') this._flush = options.flush;
}
- this.once('prefinish', function() {
- if (typeof this._flush === 'function')
- this._flush(function(er) {
- done(stream, er);
- });
- else
- done(stream);
+ this.once('prefinish', function () {
+ if (typeof this._flush === 'function') this._flush(function (er) {
+ done(stream, er);
+ });else done(stream);
});
}
-Transform.prototype.push = function(chunk, encoding) {
+Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
@@ -143,28 +133,25 @@ Transform.prototype.push = function(chunk, encoding) {
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
-Transform.prototype._transform = function(chunk, encoding, cb) {
+Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('not implemented');
};
-Transform.prototype._write = function(chunk, encoding, cb) {
+Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
- if (ts.needTransform ||
- rs.needReadable ||
- rs.length < rs.highWaterMark)
- this._read(rs.highWaterMark);
+ if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
-Transform.prototype._read = function(n) {
+Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
@@ -177,21 +164,17 @@ Transform.prototype._read = function(n) {
}
};
-
function done(stream, er) {
- if (er)
- return stream.emit('error', er);
+ if (er) return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var ts = stream._transformState;
- if (ws.length)
- throw new Error('calling transform done when ws.length != 0');
+ if (ws.length) throw new Error('calling transform done when ws.length != 0');
- if (ts.transforming)
- throw new Error('calling transform done when still transforming');
+ if (ts.transforming) throw new Error('calling transform done when still transforming');
return stream.push(null);
-}
+}
\ No newline at end of file
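The Transform changes above initialise `writeencoding`, drop the redundant `if (cb)` guard in `afterTransform`, and keep the `options.transform` / `options.flush` shortcuts. A minimal sketch of a stream built through those option hooks (again using core `stream` as a stand-in for the vendored module, not part of the patch):

```js
// Sketch only: simplified Transform construction via options.transform/flush.
var Transform = require('stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    // push the transformed data and signal that this chunk is done
    cb(null, chunk.toString().toUpperCase());
  },
  flush: function (cb) {
    this.push('!\n');   // runs on 'prefinish', before 'end'
    cb();
  }
});

upper.on('data', function (d) { process.stdout.write(d); });
upper.end('hello transform');   // prints "HELLO TRANSFORM!"
```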
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
index 1fa5eb695adde6..95916c992a9507 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js
@@ -10,6 +10,9 @@ module.exports = Writable;
var processNextTick = require('process-nextick-args');
/* */
+/**/
+var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick;
+/* */
/**/
var Buffer = require('buffer').Buffer;
@@ -17,29 +20,26 @@ var Buffer = require('buffer').Buffer;
Writable.WritableState = WritableState;
-
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
/* */
-
/**/
var internalUtil = {
deprecate: require('util-deprecate')
};
/* */
-
-
/**/
var Stream;
-(function (){try{
- Stream = require('st' + 'ream');
-}catch(_){}finally{
- if (!Stream)
- Stream = require('events').EventEmitter;
-}}())
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
/* */
var Buffer = require('buffer').Buffer;
@@ -65,18 +65,17 @@ function WritableState(options, stream) {
// contains buffers or objects.
this.objectMode = !!options.objectMode;
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.writableObjectMode;
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
@@ -120,7 +119,7 @@ function WritableState(options, stream) {
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
- this.onwrite = function(er) {
+ this.onwrite = function (er) {
onwrite(stream, er);
};
@@ -143,6 +142,14 @@ function WritableState(options, stream) {
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
+
+ // count buffered requests
+ this.bufferedRequestCount = 0;
+
+ // create the two objects needed to store the corked requests
+ // they are not a linked list, as no new elements are inserted in there
+ this.corkedRequestsFree = new CorkedRequest(this);
+ this.corkedRequestsFree.next = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function writableStateGetBuffer() {
@@ -155,15 +162,15 @@ WritableState.prototype.getBuffer = function writableStateGetBuffer() {
return out;
};
-(function (){try {
-Object.defineProperty(WritableState.prototype, 'buffer', {
- get: internalUtil.deprecate(function() {
- return this.getBuffer();
- }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' +
- 'instead.')
-});
-}catch(_){}}());
-
+(function () {
+ try {
+ Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: internalUtil.deprecate(function () {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.')
+ });
+ } catch (_) {}
+})();
var Duplex;
function Writable(options) {
@@ -171,8 +178,7 @@ function Writable(options) {
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable, they're instanceof Readable.
- if (!(this instanceof Writable) && !(this instanceof Duplex))
- return new Writable(options);
+ if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options);
this._writableState = new WritableState(options, this);
@@ -180,22 +186,19 @@ function Writable(options) {
this.writable = true;
if (options) {
- if (typeof options.write === 'function')
- this._write = options.write;
+ if (typeof options.write === 'function') this._write = options.write;
- if (typeof options.writev === 'function')
- this._writev = options.writev;
+ if (typeof options.writev === 'function') this._writev = options.writev;
}
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
-Writable.prototype.pipe = function() {
+Writable.prototype.pipe = function () {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
-
function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
@@ -211,11 +214,7 @@ function writeAfterEnd(stream, cb) {
function validChunk(stream, state, chunk, cb) {
var valid = true;
- if (!(Buffer.isBuffer(chunk)) &&
- typeof chunk !== 'string' &&
- chunk !== null &&
- chunk !== undefined &&
- !state.objectMode) {
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
processNextTick(cb, er);
@@ -224,7 +223,7 @@ function validChunk(stream, state, chunk, cb) {
return valid;
}
-Writable.prototype.write = function(chunk, encoding, cb) {
+Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
@@ -233,17 +232,11 @@ Writable.prototype.write = function(chunk, encoding, cb) {
encoding = null;
}
- if (Buffer.isBuffer(chunk))
- encoding = 'buffer';
- else if (!encoding)
- encoding = state.defaultEncoding;
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
- if (typeof cb !== 'function')
- cb = nop;
+ if (typeof cb !== 'function') cb = nop;
- if (state.ended)
- writeAfterEnd(this, cb);
- else if (validChunk(this, state, chunk, cb)) {
+ if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, chunk, encoding, cb);
}
@@ -251,42 +244,31 @@ Writable.prototype.write = function(chunk, encoding, cb) {
return ret;
};
-Writable.prototype.cork = function() {
+Writable.prototype.cork = function () {
var state = this._writableState;
state.corked++;
};
-Writable.prototype.uncork = function() {
+Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
- if (!state.writing &&
- !state.corked &&
- !state.finished &&
- !state.bufferProcessing &&
- state.bufferedRequest)
- clearBuffer(this, state);
+ if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
- if (typeof encoding === 'string')
- encoding = encoding.toLowerCase();
- if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
-'ucs2', 'ucs-2','utf16le', 'utf-16le', 'raw']
-.indexOf((encoding + '').toLowerCase()) > -1))
- throw new TypeError('Unknown encoding: ' + encoding);
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
this._writableState.defaultEncoding = encoding;
};
function decodeChunk(state, chunk, encoding) {
- if (!state.objectMode &&
- state.decodeStrings !== false &&
- typeof chunk === 'string') {
+ if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
@@ -298,16 +280,14 @@ function decodeChunk(state, chunk, encoding) {
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
- if (Buffer.isBuffer(chunk))
- encoding = 'buffer';
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
- if (!ret)
- state.needDrain = true;
+ if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
@@ -317,6 +297,7 @@ function writeOrBuffer(stream, state, chunk, encoding, cb) {
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
+ state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
@@ -329,19 +310,13 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writecb = cb;
state.writing = true;
state.sync = true;
- if (writev)
- stream._writev(chunk, state.onwrite);
- else
- stream._write(chunk, encoding, state.onwrite);
+ if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
- if (sync)
- processNextTick(cb, er);
- else
- cb(er);
+ if (sync) processNextTick(cb, er);else cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
@@ -361,30 +336,26 @@ function onwrite(stream, er) {
onwriteStateUpdate(state);
- if (er)
- onwriteError(stream, state, sync, er, cb);
- else {
+ if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state);
- if (!finished &&
- !state.corked &&
- !state.bufferProcessing &&
- state.bufferedRequest) {
+ if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
- processNextTick(afterWrite, stream, state, finished, cb);
+ /**/
+ asyncWrite(afterWrite, stream, state, finished, cb);
+ /* */
} else {
- afterWrite(stream, state, finished, cb);
- }
+ afterWrite(stream, state, finished, cb);
+ }
}
}
function afterWrite(stream, state, finished, cb) {
- if (!finished)
- onwriteDrain(stream, state);
+ if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
@@ -400,7 +371,6 @@ function onwriteDrain(stream, state) {
}
}
-
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
@@ -408,26 +378,26 @@ function clearBuffer(stream, state) {
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
- var buffer = [];
- var cbs = [];
+ var l = state.bufferedRequestCount;
+ var buffer = new Array(l);
+ var holder = state.corkedRequestsFree;
+ holder.entry = entry;
+
+ var count = 0;
while (entry) {
- cbs.push(entry.callback);
- buffer.push(entry);
+ buffer[count] = entry;
entry = entry.next;
+ count += 1;
}
- // count the one we are adding, as well.
- // TODO(isaacs) clean this up
+ doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+
+ // doWrite is always async, defer these to save a bit of time
+ // as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
- doWrite(stream, state, true, state.length, buffer, '', function(err) {
- for (var i = 0; i < cbs.length; i++) {
- state.pendingcb--;
- cbs[i](err);
- }
- });
-
- // Clear buffer
+ state.corkedRequestsFree = holder.next;
+ holder.next = null;
} else {
// Slow case, write chunks one-by-one
while (entry) {
@@ -447,20 +417,21 @@ function clearBuffer(stream, state) {
}
}
- if (entry === null)
- state.lastBufferedRequest = null;
+ if (entry === null) state.lastBufferedRequest = null;
}
+
+ state.bufferedRequestCount = 0;
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
-Writable.prototype._write = function(chunk, encoding, cb) {
+Writable.prototype._write = function (chunk, encoding, cb) {
cb(new Error('not implemented'));
};
Writable.prototype._writev = null;
-Writable.prototype.end = function(chunk, encoding, cb) {
+Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
@@ -472,8 +443,7 @@ Writable.prototype.end = function(chunk, encoding, cb) {
encoding = null;
}
- if (chunk !== null && chunk !== undefined)
- this.write(chunk, encoding);
+ if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
@@ -482,17 +452,11 @@ Writable.prototype.end = function(chunk, encoding, cb) {
}
// ignore unnecessary end() calls.
- if (!state.ending && !state.finished)
- endWritable(this, state, cb);
+ if (!state.ending && !state.finished) endWritable(this, state, cb);
};
-
function needFinish(state) {
- return (state.ending &&
- state.length === 0 &&
- state.bufferedRequest === null &&
- !state.finished &&
- !state.writing);
+ return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function prefinish(stream, state) {
@@ -520,10 +484,33 @@ function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
- if (state.finished)
- processNextTick(cb);
- else
- stream.once('finish', cb);
+ if (state.finished) processNextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
+ stream.writable = false;
}
+
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
+function CorkedRequest(state) {
+ var _this = this;
+
+ this.next = null;
+ this.entry = null;
+
+ this.finish = function (err) {
+ var entry = _this.entry;
+ _this.entry = null;
+ while (entry) {
+ var cb = entry.callback;
+ state.pendingcb--;
+ cb(err);
+ entry = entry.next;
+ }
+ if (state.corkedRequestsFree) {
+ state.corkedRequestsFree.next = _this;
+ } else {
+ state.corkedRequestsFree = _this;
+ }
+ };
+}
\ No newline at end of file
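The Writable rewrite above replaces the per-flush callback array in `clearBuffer` with two pre-allocated `CorkedRequest` holders plus a `bufferedRequestCount`, so the `_writev()` fast path no longer allocates a new closure and arrays on every flush. From the caller's side, the corked batching it optimises looks roughly like this (a sketch, assuming core `stream` behaves like the vendored copy):

```js
// Sketch only: cork()/uncork() funnelling buffered writes into one _writev().
var Writable = require('stream').Writable;

var w = new Writable();
w._writev = function (chunks, cb) {
  // chunks is the buffered request list; each entry carries .chunk and .encoding
  console.log('flushing %d chunks in one _writev call', chunks.length);
  cb();
};
w._write = function (chunk, encoding, cb) {
  console.log('single write of %d bytes', chunk.length);
  cb();
};

w.cork();                 // start buffering
w.write('a');
w.write('b');
w.write('c');
process.nextTick(function () {
  w.uncork();             // flushes all three writes through _writev at once
});
```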
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.npmignore b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.npmignore
new file mode 100644
index 00000000000000..3c3629e647f5dd
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.npmignore
@@ -0,0 +1 @@
+node_modules
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.travis.yml b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.travis.yml
new file mode 100644
index 00000000000000..cc4dba29d959a2
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - "0.8"
+ - "0.10"
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/Makefile b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/Makefile
new file mode 100644
index 00000000000000..0ecc29c402c243
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/Makefile
@@ -0,0 +1,5 @@
+
+test:
+ @node_modules/.bin/tape test.js
+
+.PHONY: test
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/README.md b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/README.md
index 052a62b8d7b7ae..16d2c59c6195f9 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/README.md
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/README.md
@@ -3,6 +3,12 @@
`Array#isArray` for older browsers.
+[](http://travis-ci.org/juliangruber/isarray)
+[](https://www.npmjs.org/package/isarray)
+
+[
+](https://ci.testling.com/juliangruber/isarray)
+
## Usage
```js
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/build/build.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/build/build.js
deleted file mode 100644
index e1856ef0943728..00000000000000
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/build/build.js
+++ /dev/null
@@ -1,208 +0,0 @@
-
-/**
- * Require the given path.
- *
- * @param {String} path
- * @return {Object} exports
- * @api public
- */
-
-function require(path, parent, orig) {
- var resolved = require.resolve(path);
-
- // lookup failed
- if (null == resolved) {
- orig = orig || path;
- parent = parent || 'root';
- var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
- err.path = orig;
- err.parent = parent;
- err.require = true;
- throw err;
- }
-
- var module = require.modules[resolved];
-
- // perform real require()
- // by invoking the module's
- // registered function
- if (!module.exports) {
- module.exports = {};
- module.client = module.component = true;
- module.call(this, module.exports, require.relative(resolved), module);
- }
-
- return module.exports;
-}
-
-/**
- * Registered modules.
- */
-
-require.modules = {};
-
-/**
- * Registered aliases.
- */
-
-require.aliases = {};
-
-/**
- * Resolve `path`.
- *
- * Lookup:
- *
- * - PATH/index.js
- * - PATH.js
- * - PATH
- *
- * @param {String} path
- * @return {String} path or null
- * @api private
- */
-
-require.resolve = function(path) {
- if (path.charAt(0) === '/') path = path.slice(1);
- var index = path + '/index.js';
-
- var paths = [
- path,
- path + '.js',
- path + '.json',
- path + '/index.js',
- path + '/index.json'
- ];
-
- for (var i = 0; i < paths.length; i++) {
- var path = paths[i];
- if (require.modules.hasOwnProperty(path)) return path;
- }
-
- if (require.aliases.hasOwnProperty(index)) {
- return require.aliases[index];
- }
-};
-
-/**
- * Normalize `path` relative to the current path.
- *
- * @param {String} curr
- * @param {String} path
- * @return {String}
- * @api private
- */
-
-require.normalize = function(curr, path) {
- var segs = [];
-
- if ('.' != path.charAt(0)) return path;
-
- curr = curr.split('/');
- path = path.split('/');
-
- for (var i = 0; i < path.length; ++i) {
- if ('..' == path[i]) {
- curr.pop();
- } else if ('.' != path[i] && '' != path[i]) {
- segs.push(path[i]);
- }
- }
-
- return curr.concat(segs).join('/');
-};
-
-/**
- * Register module at `path` with callback `definition`.
- *
- * @param {String} path
- * @param {Function} definition
- * @api private
- */
-
-require.register = function(path, definition) {
- require.modules[path] = definition;
-};
-
-/**
- * Alias a module definition.
- *
- * @param {String} from
- * @param {String} to
- * @api private
- */
-
-require.alias = function(from, to) {
- if (!require.modules.hasOwnProperty(from)) {
- throw new Error('Failed to alias "' + from + '", it does not exist');
- }
- require.aliases[to] = from;
-};
-
-/**
- * Return a require function relative to the `parent` path.
- *
- * @param {String} parent
- * @return {Function}
- * @api private
- */
-
-require.relative = function(parent) {
- var p = require.normalize(parent, '..');
-
- /**
- * lastIndexOf helper.
- */
-
- function lastIndexOf(arr, obj) {
- var i = arr.length;
- while (i--) {
- if (arr[i] === obj) return i;
- }
- return -1;
- }
-
- /**
- * The relative require() itself.
- */
-
- function localRequire(path) {
- var resolved = localRequire.resolve(path);
- return require(resolved, parent, path);
- }
-
- /**
- * Resolve relative to the parent.
- */
-
- localRequire.resolve = function(path) {
- var c = path.charAt(0);
- if ('/' == c) return path.slice(1);
- if ('.' == c) return require.normalize(p, path);
-
- // resolve deps by returning
- // the dep in the nearest "deps"
- // directory
- var segs = parent.split('/');
- var i = lastIndexOf(segs, 'deps') + 1;
- if (!i) i = 0;
- path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
- return path;
- };
-
- /**
- * Check if module is defined at `path`.
- */
-
- localRequire.exists = function(path) {
- return require.modules.hasOwnProperty(localRequire.resolve(path));
- };
-
- return localRequire;
-};
-require.register("isarray/index.js", function(exports, require, module){
-module.exports = Array.isArray || function (arr) {
- return Object.prototype.toString.call(arr) == '[object Array]';
-};
-
-});
-require.alias("isarray/index.js", "isarray/index.js");
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/index.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/index.js
index 5f5ad45d46dda9..a57f63495943a0 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/index.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/index.js
@@ -1,3 +1,5 @@
+var toString = {}.toString;
+
module.exports = Array.isArray || function (arr) {
- return Object.prototype.toString.call(arr) == '[object Array]';
+ return toString.call(arr) == '[object Array]';
};
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
index 19228ab6fdcaaf..703ea43cb4d5ac 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/package.json
@@ -1,19 +1,16 @@
{
"name": "isarray",
"description": "Array#isArray for older browsers",
- "version": "0.0.1",
+ "version": "1.0.0",
"repository": {
"type": "git",
"url": "git://github.com/juliangruber/isarray.git"
},
"homepage": "https://github.com/juliangruber/isarray",
"main": "index.js",
- "scripts": {
- "test": "tap test/*.js"
- },
"dependencies": {},
"devDependencies": {
- "tap": "*"
+ "tape": "~2.13.4"
},
"keywords": [
"browser",
@@ -26,17 +23,42 @@
"url": "http://juliangruber.com"
},
"license": "MIT",
- "_id": "isarray@0.0.1",
- "dist": {
- "shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
- "tarball": "http://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
+ "testling": {
+ "files": "test.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/17..latest",
+ "firefox/nightly",
+ "chrome/22..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
},
- "_from": "isarray@0.0.1",
- "_npmVersion": "1.2.18",
+ "scripts": {
+ "test": "tape test.js"
+ },
+ "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33",
+ "bugs": {
+ "url": "https://github.com/juliangruber/isarray/issues"
+ },
+ "_id": "isarray@1.0.0",
+ "_shasum": "bb935d48582cba168c06834957a54a3e07124f11",
+ "_from": "isarray@>=1.0.0 <1.1.0",
+ "_npmVersion": "3.3.12",
+ "_nodeVersion": "5.1.0",
"_npmUser": {
"name": "juliangruber",
"email": "julian@juliangruber.com"
},
+ "dist": {
+ "shasum": "bb935d48582cba168c06834957a54a3e07124f11",
+ "tarball": "http://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"
+ },
"maintainers": [
{
"name": "juliangruber",
@@ -44,10 +66,6 @@
}
],
"directories": {},
- "_shasum": "8a18acfca9a8f4177e09abfc6038939b05d1eedf",
- "_resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
- "bugs": {
- "url": "https://github.com/juliangruber/isarray/issues"
- },
+ "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/test.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/test.js
new file mode 100644
index 00000000000000..f7f7bcd19fec56
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/isarray/test.js
@@ -0,0 +1,19 @@
+var isArray = require('./');
+var test = require('tape');
+
+test('is array', function(t){
+ t.ok(isArray([]));
+ t.notOk(isArray({}));
+ t.notOk(isArray(null));
+ t.notOk(isArray(false));
+
+ var obj = {};
+ obj[0] = true;
+ t.notOk(isArray(obj));
+
+ var arr = [];
+ arr.foo = 'bar';
+ t.ok(isArray(arr));
+
+ t.end();
+});
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js
index 571c276783c779..a4f40f845faa65 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/index.js
@@ -8,13 +8,36 @@ if (!process.version ||
module.exports = process.nextTick;
}
-function nextTick(fn) {
- var args = new Array(arguments.length - 1);
- var i = 0;
- while (i < args.length) {
- args[i++] = arguments[i];
+function nextTick(fn, arg1, arg2, arg3) {
+ if (typeof fn !== 'function') {
+ throw new TypeError('"callback" argument must be a function');
+ }
+ var len = arguments.length;
+ var args, i;
+ switch (len) {
+ case 0:
+ case 1:
+ return process.nextTick(fn);
+ case 2:
+ return process.nextTick(function afterTickOne() {
+ fn.call(null, arg1);
+ });
+ case 3:
+ return process.nextTick(function afterTickTwo() {
+ fn.call(null, arg1, arg2);
+ });
+ case 4:
+ return process.nextTick(function afterTickThree() {
+ fn.call(null, arg1, arg2, arg3);
+ });
+ default:
+ args = new Array(len - 1);
+ i = 0;
+ while (i < args.length) {
+ args[i++] = arguments[i];
+ }
+ return process.nextTick(function afterTick() {
+ fn.apply(null, args);
+ });
}
- process.nextTick(function afterTick() {
- fn.apply(null, args);
- });
}
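process-nextick-args 1.0.7 special-cases zero to three trailing arguments so common calls avoid copying `arguments` into an array. Call sites look the same as `process.nextTick` with extra args (sketch; assumes the bundled `process-nextick-args` package is resolvable):

```js
// Sketch only: passing trailing args through the shim shown above.
var nextTick = require('process-nextick-args');

nextTick(function (a, b, c) {
  console.log(a, b, c);   // -> 1 2 3, on the next tick
}, 1, 2, 3);
```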
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json
index 0ccf22578d2caa..211b098d4cbb95 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/node_modules/process-nextick-args/package.json
@@ -1,6 +1,6 @@
{
"name": "process-nextick-args",
- "version": "1.0.6",
+ "version": "1.0.7",
"description": "process.nextTick but always with args",
"main": "index.js",
"scripts": {
@@ -19,19 +19,19 @@
"devDependencies": {
"tap": "~0.2.6"
},
- "gitHead": "e85787b05a8c3c1adb714f332d822e9162699c78",
- "_id": "process-nextick-args@1.0.6",
- "_shasum": "0f96b001cea90b12592ce566edb97ec11e69bd05",
+ "gitHead": "5c00899ab01dd32f93ad4b5743da33da91404f39",
+ "_id": "process-nextick-args@1.0.7",
+ "_shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3",
"_from": "process-nextick-args@>=1.0.6 <1.1.0",
- "_npmVersion": "2.14.4",
- "_nodeVersion": "4.1.1",
+ "_npmVersion": "3.8.6",
+ "_nodeVersion": "5.11.0",
"_npmUser": {
"name": "cwmma",
"email": "calvin.metcalf@gmail.com"
},
"dist": {
- "shasum": "0f96b001cea90b12592ce566edb97ec11e69bd05",
- "tarball": "http://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.6.tgz"
+ "shasum": "150e20b756590ad3f91093f25a4f2ad8bff30ba3",
+ "tarball": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz"
},
"maintainers": [
{
@@ -39,7 +39,11 @@
"email": "calvin.metcalf@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/process-nextick-args-1.0.7.tgz_1462394251778_0.36989671061746776"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.6.tgz",
+ "_resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
index d6a064e6fab39b..b9fc0daecb9d84 100644
--- a/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/node_modules/readable-stream/package.json
@@ -1,23 +1,23 @@
{
"name": "readable-stream",
- "version": "2.0.5",
- "description": "Streams3, a user-land copy of the stream library from iojs v2.x",
+ "version": "2.0.6",
+ "description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.1",
- "isarray": "0.0.1",
+ "isarray": "~1.0.0",
"process-nextick-args": "~1.0.6",
"string_decoder": "~0.10.x",
"util-deprecate": "~1.0.1"
},
"devDependencies": {
"tap": "~0.2.6",
- "tape": "~4.0.0",
- "zuul": "~3.0.0"
+ "tape": "~4.5.1",
+ "zuul": "~3.9.0"
},
"scripts": {
- "test": "tap test/parallel/*.js",
+ "test": "tap test/parallel/*.js test/ours/*.js",
"browser": "npm run write-zuul && zuul -- test/browser.js",
"write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml"
},
@@ -34,23 +34,23 @@
"util": false
},
"license": "MIT",
- "gitHead": "a4f23d8e451267684a8160679ce16e16149fe72b",
+ "gitHead": "01fb5608a970b42c900b96746cadc13d27dd9d7e",
"bugs": {
"url": "https://github.com/nodejs/readable-stream/issues"
},
"homepage": "https://github.com/nodejs/readable-stream#readme",
- "_id": "readable-stream@2.0.5",
- "_shasum": "a2426f8dcd4551c77a33f96edf2886a23c829669",
+ "_id": "readable-stream@2.0.6",
+ "_shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e",
"_from": "readable-stream@>=2.0.5 <2.1.0",
- "_npmVersion": "3.3.12",
- "_nodeVersion": "5.1.1",
+ "_npmVersion": "3.6.0",
+ "_nodeVersion": "5.7.0",
"_npmUser": {
"name": "cwmma",
"email": "calvin.metcalf@gmail.com"
},
"dist": {
- "shasum": "a2426f8dcd4551c77a33f96edf2886a23c829669",
- "tarball": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.0.5.tgz"
+ "shasum": "8f90341e68a53ccc928788dacfcd11b36eb9b78e",
+ "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz"
},
"maintainers": [
{
@@ -70,7 +70,11 @@
"email": "calvin.metcalf@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/readable-stream-2.0.6.tgz_1457893507709_0.369257491780445"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.5.tgz",
+ "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/package.json b/deps/npm/node_modules/request/node_modules/bl/package.json
index 469ebc8ea3604a..23203168497fec 100644
--- a/deps/npm/node_modules/request/node_modules/bl/package.json
+++ b/deps/npm/node_modules/request/node_modules/bl/package.json
@@ -1,6 +1,6 @@
{
"name": "bl",
- "version": "1.0.2",
+ "version": "1.1.2",
"description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
"main": "bl.js",
"scripts": {
@@ -27,19 +27,19 @@
"readable-stream": "~2.0.5"
},
"devDependencies": {
- "tape": "~2.12.3",
+ "faucet": "0.0.1",
"hash_file": "~0.1.1",
- "faucet": "~0.0.1"
+ "tape": "~4.4.0"
},
- "gitHead": "9f1e7ce410e28d68c0a6f678b93b4cc2273e585f",
+ "gitHead": "ea42021059dc65fc60d7f4b9217c73431f09d23d",
"bugs": {
"url": "https://github.com/rvagg/bl/issues"
},
- "_id": "bl@1.0.2",
- "_shasum": "8c66490d825ba84d560de1f62196a29555b3a0c4",
- "_from": "bl@>=1.0.0 <1.1.0",
- "_npmVersion": "3.6.0",
- "_nodeVersion": "5.5.0",
+ "_id": "bl@1.1.2",
+ "_shasum": "fdca871a99713aa00d19e3bbba41c44787a65398",
+ "_from": "bl@>=1.1.2 <1.2.0",
+ "_npmVersion": "3.3.12",
+ "_nodeVersion": "5.3.0",
"_npmUser": {
"name": "rvagg",
"email": "rod@vagg.org"
@@ -51,13 +51,14 @@
}
],
"dist": {
- "shasum": "8c66490d825ba84d560de1f62196a29555b3a0c4",
- "tarball": "http://registry.npmjs.org/bl/-/bl-1.0.2.tgz"
+ "shasum": "fdca871a99713aa00d19e3bbba41c44787a65398",
+ "tarball": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz"
},
"_npmOperationalInternal": {
- "host": "packages-6-west.internal.npmjs.com",
- "tmp": "tmp/bl-1.0.2.tgz_1454532811740_0.7871121023781598"
+ "host": "packages-9-west.internal.npmjs.com",
+ "tmp": "tmp/bl-1.1.2.tgz_1455246621698_0.6300242957659066"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/bl/-/bl-1.0.2.tgz"
+ "_resolved": "https://registry.npmjs.org/bl/-/bl-1.1.2.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/bl/test/test.js b/deps/npm/node_modules/request/node_modules/bl/test/test.js
index 8c8d0cf2ae2d01..c95b1ba4844ef7 100644
--- a/deps/npm/node_modules/request/node_modules/bl/test/test.js
+++ b/deps/npm/node_modules/request/node_modules/bl/test/test.js
@@ -79,12 +79,39 @@ tape('multiple bytes from multiple buffers', function (t) {
tape('multiple bytes from multiple buffer lists', function (t) {
var bl = new BufferList()
- bl.append(new BufferList([new Buffer('abcd'), new Buffer('efg')]))
- bl.append(new BufferList([new Buffer('hi'), new Buffer('j')]))
+ bl.append(new BufferList([ new Buffer('abcd'), new Buffer('efg') ]))
+ bl.append(new BufferList([ new Buffer('hi'), new Buffer('j') ]))
t.equal(bl.length, 10)
t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+
+ t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
+ t.equal(bl.slice(3, 6).toString('ascii'), 'def')
+ t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
+ t.equal(bl.slice(5, 10).toString('ascii'), 'fghij')
+
+ t.end()
+})
+
+// same data as previous test, just using nested constructors
+tape('multiple bytes from crazy nested buffer lists', function (t) {
+ var bl = new BufferList()
+
+ bl.append(new BufferList([
+ new BufferList([
+ new BufferList(new Buffer('abc'))
+ , new Buffer('d')
+ , new BufferList(new Buffer('efg'))
+ ])
+ , new BufferList([ new Buffer('hi') ])
+ , new BufferList(new Buffer('j'))
+ ]))
+
+ t.equal(bl.length, 10)
+
+ t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij')
+
t.equal(bl.slice(3, 10).toString('ascii'), 'defghij')
t.equal(bl.slice(3, 6).toString('ascii'), 'def')
t.equal(bl.slice(3, 8).toString('ascii'), 'defgh')
@@ -93,6 +120,48 @@ tape('multiple bytes from multiple buffer lists', function (t) {
t.end()
})
+tape('append accepts arrays of Buffers', function (t) {
+ var bl = new BufferList()
+ bl.append(new Buffer('abc'))
+ bl.append([ new Buffer('def') ])
+ bl.append([ new Buffer('ghi'), new Buffer('jkl') ])
+ bl.append([ new Buffer('mnop'), new Buffer('qrstu'), new Buffer('vwxyz') ])
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
+tape('append accepts arrays of BufferLists', function (t) {
+ var bl = new BufferList()
+ bl.append(new Buffer('abc'))
+ bl.append([ new BufferList('def') ])
+ bl.append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
+ bl.append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
+tape('append chainable', function (t) {
+ var bl = new BufferList()
+ t.ok(bl.append(new Buffer('abcd')) === bl)
+ t.ok(bl.append([ new Buffer('abcd') ]) === bl)
+ t.ok(bl.append(new BufferList(new Buffer('abcd'))) === bl)
+ t.ok(bl.append([ new BufferList(new Buffer('abcd')) ]) === bl)
+ t.end()
+})
+
+tape('append chainable (test results)', function (t) {
+ var bl = new BufferList('abc')
+ .append([ new BufferList('def') ])
+ .append(new BufferList([ new Buffer('ghi'), new BufferList('jkl') ]))
+ .append([ new Buffer('mnop'), new BufferList([ new Buffer('qrstu'), new Buffer('vwxyz') ]) ])
+
+ t.equal(bl.length, 26)
+ t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz')
+ t.end()
+})
+
tape('consuming from multiple buffers', function (t) {
var bl = new BufferList()
@@ -128,6 +197,20 @@ tape('consuming from multiple buffers', function (t) {
t.end()
})
+tape('complete consumption', function (t) {
+ var bl = new BufferList()
+
+ bl.append(new Buffer('a'))
+ bl.append(new Buffer('b'))
+
+ bl.consume(2)
+
+ t.equal(bl.length, 0)
+ t.equal(bl._bufs.length, 0)
+
+ t.end()
+})
+
tape('test readUInt8 / readInt8', function (t) {
var buf1 = new Buffer(1)
, buf2 = new Buffer(3)
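The added tests pin down bl 1.1.2 behaviour: `append()` now accepts arrays of Buffers or (nested) BufferLists, returns the list for chaining, and `consume()` can empty the list completely. A short usage sketch of the chaining form (assumes the bundled `bl` package is resolvable; mirrors the tests above rather than adding new behaviour):

```js
// Sketch only: chained append() with mixed Buffer/BufferList inputs.
var BufferList = require('bl');

var bl = new BufferList('abc')
  .append([ new Buffer('def') ])
  .append(new BufferList([ new Buffer('ghi'), new Buffer('jkl') ]));

console.log(bl.length);                        // 12
console.log(bl.slice(3, 9).toString('ascii')); // 'defghi'
```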
diff --git a/deps/npm/node_modules/request/node_modules/extend/package.json b/deps/npm/node_modules/request/node_modules/extend/package.json
index c8c7cac9967924..f8341433c8431e 100644
--- a/deps/npm/node_modules/request/node_modules/extend/package.json
+++ b/deps/npm/node_modules/request/node_modules/extend/package.json
@@ -55,7 +55,7 @@
},
"dist": {
"shasum": "5a474353b9f3353ddd8176dfd37b91c83a46f1d4",
- "tarball": "http://registry.npmjs.org/extend/-/extend-3.0.0.tgz"
+ "tarball": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/form-data/.dockerignore b/deps/npm/node_modules/request/node_modules/form-data/.dockerignore
new file mode 100644
index 00000000000000..c67305cf96976e
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/.dockerignore
@@ -0,0 +1,7 @@
+*.iml
+*.sublime-*
+*.un~
+.idea
+sftp-config.json
+node_modules/
+test/tmp/
diff --git a/deps/npm/node_modules/request/node_modules/form-data/.editorconfig b/deps/npm/node_modules/request/node_modules/form-data/.editorconfig
new file mode 100644
index 00000000000000..0f099897b15f2f
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/.editorconfig
@@ -0,0 +1,10 @@
+# editorconfig.org
+root = true
+
+[*]
+indent_style = space
+indent_size = 2
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
diff --git a/deps/npm/node_modules/request/node_modules/form-data/.eslintignore b/deps/npm/node_modules/request/node_modules/form-data/.eslintignore
new file mode 100644
index 00000000000000..8d87b1d267eabc
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/.eslintignore
@@ -0,0 +1 @@
+node_modules/*
diff --git a/deps/npm/node_modules/request/node_modules/form-data/Readme.md b/deps/npm/node_modules/request/node_modules/form-data/Readme.md
index 492773231f6673..e857db6e2ea8e1 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/Readme.md
+++ b/deps/npm/node_modules/request/node_modules/form-data/Readme.md
@@ -1,4 +1,4 @@
-# Form-Data [](https://gitter.im/form-data/form-data) [](https://travis-ci.org/form-data/form-data) [](https://david-dm.org/form-data/form-data)
+# Form-Data [](https://www.npmjs.com/package/form-data) [](https://gitter.im/form-data/form-data)
A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications.
@@ -7,6 +7,14 @@ The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface]
[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface
[streams2-thing]: http://nodejs.org/api/stream.html#stream_compatibility_with_older_node_versions
+[](https://travis-ci.org/form-data/form-data)
+[](https://ci.appveyor.com/project/alexindigo/form-data)
+[](https://coveralls.io/github/form-data/form-data?branch=master)
+
+[](https://david-dm.org/form-data/form-data)
+[](https://www.codacy.com/app/form-data/form-data)
+[](https://www.bithound.io/github/form-data/form-data)
+
## Install
```
diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js
index 1e7717d5641c60..8141d6589ddacc 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/lib/browser.js
@@ -1 +1,2 @@
-module.exports = FormData;
\ No newline at end of file
+/* eslint-env browser */
+module.exports = FormData;
diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
index 0cbb2e8331a871..55328b463b3bf3 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
+++ b/deps/npm/node_modules/request/node_modules/form-data/lib/form_data.js
@@ -7,29 +7,51 @@ var parseUrl = require('url').parse;
var fs = require('fs');
var mime = require('mime-types');
var async = require('async');
+var populate = require('./populate.js');
+// Public API
module.exports = FormData;
+
+// make it a Stream
+util.inherits(FormData, CombinedStream);
+
+/**
+ * Create readable "multipart/form-data" streams.
+ * Can be used to submit forms
+ * and file uploads to other web applications.
+ *
+ * @constructor
+ */
function FormData() {
+ if (!(this instanceof FormData)) {
+ throw new TypeError('Failed to construct FormData: Please use the _new_ operator, this object constructor cannot be called as a function.');
+ }
+
this._overheadLength = 0;
this._valueLength = 0;
this._lengthRetrievers = [];
CombinedStream.call(this);
}
-util.inherits(FormData, CombinedStream);
FormData.LINE_BREAK = '\r\n';
FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream';
FormData.prototype.append = function(field, value, options) {
- options = (typeof options === 'string')
- ? { filename: options }
- : options || {};
+
+ options = options || {};
+
+ // allow filename as single option
+ if (typeof options == 'string') {
+ options = {filename: options};
+ }
var append = CombinedStream.prototype.append.bind(this);
// all that streamy business can't handle numbers
- if (typeof value == 'number') value = ''+value;
+ if (typeof value == 'number') {
+ value = '' + value;
+ }
// https://github.com/felixge/node-form-data/issues/38
if (util.isArray(value)) {
@@ -40,7 +62,7 @@ FormData.prototype.append = function(field, value, options) {
}
var header = this._multiPartHeader(field, value, options);
- var footer = this._multiPartFooter(field, value, options);
+ var footer = this._multiPartFooter();
append(header);
append(value);
@@ -78,108 +100,83 @@ FormData.prototype._trackLength = function(header, value, options) {
}
// no need to bother with the length
- if (!options.knownLength)
- this._lengthRetrievers.push(function(next) {
-
- if (value.hasOwnProperty('fd')) {
-
- // take read range into a account
- // `end` = Infinity –> read file till the end
- //
- // TODO: Looks like there is bug in Node fs.createReadStream
- // it doesn't respect `end` options without `start` options
- // Fix it when node fixes it.
- // https://github.com/joyent/node/issues/7819
- if (value.end != undefined && value.end != Infinity && value.start != undefined) {
-
- // when end specified
- // no need to calculate range
- // inclusive, starts with 0
- next(null, value.end+1 - (value.start ? value.start : 0));
-
- // not that fast snoopy
- } else {
- // still need to fetch file size from fs
- fs.stat(value.path, function(err, stat) {
-
- var fileSize;
-
- if (err) {
- next(err);
- return;
- }
-
- // update final size based on the range options
- fileSize = stat.size - (value.start ? value.start : 0);
- next(null, fileSize);
+ if (!options.knownLength) {
+ this._lengthRetrievers.push(function(next) {
+
+ if (value.hasOwnProperty('fd')) {
+
+ // take read range into a account
+ // `end` = Infinity –> read file till the end
+ //
+ // TODO: Looks like there is bug in Node fs.createReadStream
+ // it doesn't respect `end` options without `start` options
+ // Fix it when node fixes it.
+ // https://github.com/joyent/node/issues/7819
+ if (value.end != undefined && value.end != Infinity && value.start != undefined) {
+
+ // when end specified
+ // no need to calculate range
+ // inclusive, starts with 0
+ next(null, value.end + 1 - (value.start ? value.start : 0));
+
+ // not that fast snoopy
+ } else {
+ // still need to fetch file size from fs
+ fs.stat(value.path, function(err, stat) {
+
+ var fileSize;
+
+ if (err) {
+ next(err);
+ return;
+ }
+
+ // update final size based on the range options
+ fileSize = stat.size - (value.start ? value.start : 0);
+ next(null, fileSize);
+ });
+ }
+
+ // or http response
+ } else if (value.hasOwnProperty('httpVersion')) {
+ next(null, +value.headers['content-length']);
+
+ // or request stream http://github.com/mikeal/request
+ } else if (value.hasOwnProperty('httpModule')) {
+ // wait till response come back
+ value.on('response', function(response) {
+ value.pause();
+ next(null, +response.headers['content-length']);
});
- }
+ value.resume();
- // or http response
- } else if (value.hasOwnProperty('httpVersion')) {
- next(null, +value.headers['content-length']);
-
- // or request stream http://github.com/mikeal/request
- } else if (value.hasOwnProperty('httpModule')) {
- // wait till response come back
- value.on('response', function(response) {
- value.pause();
- next(null, +response.headers['content-length']);
- });
- value.resume();
-
- // something else
- } else {
- next('Unknown stream');
- }
- });
+ // something else
+ } else {
+ next('Unknown stream');
+ }
+ });
+ }
};
FormData.prototype._multiPartHeader = function(field, value, options) {
// custom header specified (as string)?
// it becomes responsible for boundary
// (e.g. to handle extra CRLFs on .NET servers)
- if (options.header != null) {
+ if (options.header) {
return options.header;
}
+ var contentDisposition = this._getContentDisposition(value, options);
+ var contentType = this._getContentType(value, options);
+
var contents = '';
var headers = {
- 'Content-Disposition': ['form-data', 'name="' + field + '"'],
- 'Content-Type': []
+ // add custom disposition as third element or keep it two elements if not
+ 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []),
+ // if no content type. allow it to be empty array
+ 'Content-Type': [].concat(contentType || [])
};
- // fs- and request- streams have path property
- // or use custom filename and/or contentType
- // TODO: Use request's response mime-type
- if (options.filename || value.path) {
- headers['Content-Disposition'].push(
- 'filename="' + path.basename(options.filename || value.path) + '"'
- );
- headers['Content-Type'].push(
- options.contentType ||
- mime.lookup(options.filename || value.path) ||
- FormData.DEFAULT_CONTENT_TYPE
- );
- // http response has not
- } else if (value.readable && value.hasOwnProperty('httpVersion')) {
- headers['Content-Disposition'].push(
- 'filename="' + path.basename(value.client._httpMessage.path) + '"'
- );
- headers['Content-Type'].push(
- options.contentType ||
- value.headers['content-type'] ||
- FormData.DEFAULT_CONTENT_TYPE
- );
- } else if (Buffer.isBuffer(value)) {
- headers['Content-Type'].push(
- options.contentType ||
- FormData.DEFAULT_CONTENT_TYPE
- );
- } else if (options.contentType) {
- headers['Content-Type'].push(options.contentType);
- }
-
for (var prop in headers) {
if (headers[prop].length) {
contents += prop + ': ' + headers[prop].join('; ') + FormData.LINE_BREAK;
@@ -189,7 +186,55 @@ FormData.prototype._multiPartHeader = function(field, value, options) {
return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK;
};
-FormData.prototype._multiPartFooter = function(field, value, options) {
+FormData.prototype._getContentDisposition = function(value, options) {
+
+ var contentDisposition;
+
+ // custom filename takes precedence
+ // fs- and request- streams have path property
+ var filename = options.filename || value.path;
+
+ // or try http response
+ if (!filename && value.readable && value.hasOwnProperty('httpVersion')) {
+ filename = value.client._httpMessage.path;
+ }
+
+ if (filename) {
+ contentDisposition = 'filename="' + path.basename(filename) + '"';
+ }
+
+ return contentDisposition;
+};
+
+FormData.prototype._getContentType = function(value, options) {
+
+ // use custom content-type above all
+ var contentType = options.contentType;
+
+ // or try `path` from fs-, request- streams
+ if (!contentType && value.path) {
+ contentType = mime.lookup(value.path);
+ }
+
+ // or if it's http-reponse
+ if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) {
+ contentType = value.headers['content-type'];
+ }
+
+ // or guess it from the filename
+ if (!contentType && options.filename) {
+ contentType = mime.lookup(options.filename);
+ }
+
+ // fallback to the default content type if `value` is not simple value
+ if (!contentType && typeof value == 'object') {
+ contentType = FormData.DEFAULT_CONTENT_TYPE;
+ }
+
+ return contentType;
+};
+
+FormData.prototype._multiPartFooter = function() {
return function(next) {
var footer = FormData.LINE_BREAK;
@@ -207,27 +252,30 @@ FormData.prototype._lastBoundary = function() {
};
FormData.prototype.getHeaders = function(userHeaders) {
+ var header;
var formHeaders = {
'content-type': 'multipart/form-data; boundary=' + this.getBoundary()
};
- for (var header in userHeaders) {
- formHeaders[header.toLowerCase()] = userHeaders[header];
+ for (header in userHeaders) {
+ if (userHeaders.hasOwnProperty(header)) {
+ formHeaders[header.toLowerCase()] = userHeaders[header];
+ }
}
return formHeaders;
-}
+};
FormData.prototype.getCustomHeaders = function(contentType) {
- contentType = contentType ? contentType : 'multipart/form-data';
+ contentType = contentType ? contentType : 'multipart/form-data';
- var formHeaders = {
- 'content-type': contentType + '; boundary=' + this.getBoundary(),
- 'content-length': this.getLengthSync()
- };
+ var formHeaders = {
+ 'content-type': contentType + '; boundary=' + this.getBoundary(),
+ 'content-length': this.getLengthSync()
+ };
- return formHeaders;
-}
+ return formHeaders;
+};
FormData.prototype.getBoundary = function() {
if (!this._boundary) {
@@ -251,7 +299,7 @@ FormData.prototype._generateBoundary = function() {
// Note: getLengthSync DOESN'T calculate streams length
// As workaround one can calculate file size manually
// and add it as knownLength option
-FormData.prototype.getLengthSync = function(debug) {
+FormData.prototype.getLengthSync = function() {
var knownLength = this._overheadLength + this._valueLength;
// Don't get confused, there are 3 "internal" streams for each keyval pair
@@ -260,9 +308,9 @@ FormData.prototype.getLengthSync = function(debug) {
knownLength += this._lastBoundary().length;
}
- // https://github.com/felixge/node-form-data/issues/40
+ // https://github.com/form-data/form-data/issues/40
if (this._lengthRetrievers.length) {
- // Some async length retrivers are present
+ // Some async length retrievers are present
// therefore synchronous length calculation is false.
// Please use getLength(callback) to get proper length
this._error(new Error('Cannot calculate proper length in synchronous way.'));
@@ -298,26 +346,25 @@ FormData.prototype.getLength = function(cb) {
};
FormData.prototype.submit = function(params, cb) {
-
var request
, options
- , defaults = {
- method : 'post'
- };
+ , defaults = {method: 'post'}
+ ;
// parse provided url if it's string
// or treat it as options object
if (typeof params == 'string') {
- params = parseUrl(params);
+ params = parseUrl(params);
options = populate({
port: params.port,
path: params.pathname,
host: params.hostname
}, defaults);
- }
- else // use custom params
- {
+
+ // use custom params
+ } else {
+
options = populate(params, defaults);
// if no port provided use default one
if (!options.port) {
@@ -337,8 +384,10 @@ FormData.prototype.submit = function(params, cb) {
// get content length and fire away
this.getLength(function(err, length) {
-
- // TODO: Add chunked encoding when no length (if err)
+ if (err) {
+ this._error(err);
+ return;
+ }
// add content length
request.setHeader('Content-Length', length);
@@ -354,21 +403,9 @@ FormData.prototype.submit = function(params, cb) {
};
FormData.prototype._error = function(err) {
- if (this.error) return;
-
- this.error = err;
- this.pause();
- this.emit('error', err);
-};
-
-/*
- * Santa's little helpers
- */
-
-// populates missing values
-function populate(dst, src) {
- for (var prop in src) {
- if (!dst[prop]) dst[prop] = src[prop];
+ if (!this.error) {
+ this.error = err;
+ this.pause();
+ this.emit('error', err);
}
- return dst;
-}
+};
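For reference while reading the refactored append/header logic above, this is a minimal, illustrative use of the form-data API (not part of the patch; the field names and URL are placeholders):

```js
var FormData = require('form-data');
var fs = require('fs');

var form = new FormData();

// the third argument may be a filename string or an options object
// ({ filename, contentType, knownLength })
form.append('title', 'hello');
form.append('raw', new Buffer('abc'), { contentType: 'application/octet-stream' });
form.append('upload', fs.createReadStream('./photo.png'), 'photo.png');

// the content-type header carries the generated multipart boundary
console.log(form.getHeaders());

// submit() builds the request and streams the parts
form.submit('http://example.org/upload', function(err, res) {
  if (err) throw err;
  console.log(res.statusCode);
});
```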
diff --git a/deps/npm/node_modules/request/node_modules/form-data/lib/populate.js b/deps/npm/node_modules/request/node_modules/form-data/lib/populate.js
new file mode 100644
index 00000000000000..6f64a6d32cf186
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/lib/populate.js
@@ -0,0 +1,9 @@
+// populates missing values
+module.exports = function(dst, src) {
+ for (var prop in src) {
+ if (src.hasOwnProperty(prop) && !dst[prop]) {
+ dst[prop] = src[prop];
+ }
+ }
+ return dst;
+};
diff --git a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
index 369755e2948420..cdcd14153b7dff 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
+++ b/deps/npm/node_modules/request/node_modules/form-data/node_modules/async/package.json
@@ -88,7 +88,7 @@
"homepage": "https://github.com/caolan/async#readme",
"_id": "async@1.5.2",
"_shasum": "ec6a61ae56480c0c3cb241c95618e20892f9672a",
- "_from": "async@>=1.4.0 <2.0.0",
+ "_from": "async@>=1.5.2 <2.0.0",
"_npmVersion": "3.5.2",
"_nodeVersion": "4.2.3",
"_npmUser": {
@@ -115,7 +115,7 @@
],
"dist": {
"shasum": "ec6a61ae56480c0c3cb241c95618e20892f9672a",
- "tarball": "http://registry.npmjs.org/async/-/async-1.5.2.tgz"
+ "tarball": "https://registry.npmjs.org/async/-/async-1.5.2.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz",
diff --git a/deps/npm/node_modules/request/node_modules/form-data/package.json b/deps/npm/node_modules/request/node_modules/form-data/package.json
index 662e628b206630..95ab6c1dd6da8b 100644
--- a/deps/npm/node_modules/request/node_modules/form-data/package.json
+++ b/deps/npm/node_modules/request/node_modules/form-data/package.json
@@ -6,7 +6,7 @@
},
"name": "form-data",
"description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.",
- "version": "1.0.0-rc3",
+ "version": "1.0.0-rc4",
"repository": {
"type": "git",
"url": "git://github.com/form-data/form-data.git"
@@ -14,44 +14,59 @@
"main": "./lib/form_data",
"browser": "./lib/browser",
"scripts": {
- "test": "./test/run.js"
+ "pretest": "rimraf coverage test/tmp",
+ "test": "istanbul cover --report none test/run.js",
+ "posttest": "istanbul report",
+ "lint": "eslint lib/*.js test/*.js test/**/*.js",
+ "predebug": "rimraf coverage test/tmp",
+ "debug": "verbose=1 ./test/run.js",
+ "check": "istanbul check-coverage coverage/coverage*.json",
+ "coverage": "codacy-coverage < ./coverage/lcov.info; true"
},
"pre-commit": [
- "test"
+ "lint",
+ "test",
+ "check"
],
"engines": {
"node": ">= 0.10"
},
"dependencies": {
- "async": "^1.4.0",
+ "async": "^1.5.2",
"combined-stream": "^1.0.5",
- "mime-types": "^2.1.3"
+ "mime-types": "^2.1.10"
},
"license": "MIT",
"devDependencies": {
+ "codacy-coverage": "^1.1.3",
+ "coveralls": "^2.11.8",
+ "cross-spawn": "^2.1.5",
+ "eslint": "^2.4.0",
"fake": "^0.2.2",
"far": "^0.0.7",
"formidable": "^1.0.17",
- "pre-commit": "^1.0.10",
- "request": "^2.60.0"
+ "istanbul": "^0.4.2",
+ "pre-commit": "^1.1.2",
+ "request": "^2.69.0",
+ "rimraf": "^2.5.2"
},
- "gitHead": "c174f1b7f3a78a00ec5af0360469280445e37804",
+ "gitHead": "f73996e0508ee2d4b2b376276adfac1de4188ac2",
"bugs": {
"url": "https://github.com/form-data/form-data/issues"
},
"homepage": "https://github.com/form-data/form-data#readme",
- "_id": "form-data@1.0.0-rc3",
- "_shasum": "d35bc62e7fbc2937ae78f948aaa0d38d90607577",
+ "_id": "form-data@1.0.0-rc4",
+ "_shasum": "05ac6bc22227b43e4461f488161554699d4f8b5e",
"_from": "form-data@>=1.0.0-rc3 <1.1.0",
- "_npmVersion": "2.11.0",
- "_nodeVersion": "2.2.1",
+ "_npmVersion": "2.14.9",
+ "_nodeVersion": "0.12.11",
"_npmUser": {
- "name": "dylanpiercey",
- "email": "pierceydylan@gmail.com"
+ "name": "alexindigo",
+ "email": "iam@alexindigo.com"
},
"dist": {
- "shasum": "d35bc62e7fbc2937ae78f948aaa0d38d90607577",
- "tarball": "http://registry.npmjs.org/form-data/-/form-data-1.0.0-rc3.tgz"
+ "shasum": "05ac6bc22227b43e4461f488161554699d4f8b5e",
+ "tarball": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc4.tgz"
},
"maintainers": [
{
@@ -79,7 +94,11 @@
"email": "pierceydylan@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/form-data-1.0.0-rc4.tgz_1458059747097_0.14101114077493548"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc3.tgz",
+ "_resolved": "https://registry.npmjs.org/form-data/-/form-data-1.0.0-rc4.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/form-data/wercker.yml b/deps/npm/node_modules/request/node_modules/form-data/wercker.yml
new file mode 100644
index 00000000000000..6b118d1e31aa73
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/form-data/wercker.yml
@@ -0,0 +1,36 @@
+# This references the default nodejs container from
+# the Docker Hub: https://registry.hub.docker.com/_/node/
+# If you want Nodesource's container you would reference nodesource/node
+# Read more about containers on our dev center
+# http://devcenter.wercker.com/docs/containers/index.html
+box: node
+# This is the build pipeline. Pipelines are the core of wercker
+# Read more about pipelines on our dev center
+# http://devcenter.wercker.com/docs/pipelines/index.html
+
+# You can also use services such as databases. Read more on our dev center:
+# http://devcenter.wercker.com/docs/services/index.html
+# services:
+ # - postgres
+ # http://devcenter.wercker.com/docs/services/postgresql.html
+
+ # - mongodb
+ # http://devcenter.wercker.com/docs/services/mongodb.html
+build:
+ # The steps that will be executed on build
+ # Steps make up the actions in your pipeline
+ # Read more about steps on our dev center:
+ # http://devcenter.wercker.com/docs/steps/index.html
+ steps:
+ # A step that executes `npm install` command
+ - npm-install
+ # A step that executes `npm test` command
+ - npm-test
+
+ # A custom script step, name value is used in the UI
+ # and the code value contains the command that get executed
+ - script:
+ name: echo nodejs information
+ code: |
+ echo "node version $(node -v) running"
+ echo "npm version $(npm -v) running"
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
index b6a9ceaea0fa60..de25e5c33413d7 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/ansi-styles/package.json
@@ -1,6 +1,6 @@
{
"name": "ansi-styles",
- "version": "2.1.0",
+ "version": "2.2.1",
"description": "ANSI escape codes for styling strings in the terminal",
"license": "MIT",
"repository": {
@@ -16,10 +16,6 @@
{
"name": "sindresorhus",
"email": "sindresorhus@gmail.com"
- },
- {
- "name": "jbnicolai",
- "email": "jappelman@xebia.com"
}
],
"engines": {
@@ -56,25 +52,29 @@
"devDependencies": {
"mocha": "*"
},
- "gitHead": "18421cbe4a2d93359ec2599a894f704be126d066",
+ "gitHead": "95c59b23be760108b6530ca1c89477c21b258032",
"bugs": {
"url": "https://github.com/chalk/ansi-styles/issues"
},
- "homepage": "https://github.com/chalk/ansi-styles",
- "_id": "ansi-styles@2.1.0",
- "_shasum": "990f747146927b559a932bf92959163d60c0d0e2",
- "_from": "ansi-styles@>=2.1.0 <3.0.0",
- "_npmVersion": "2.10.1",
- "_nodeVersion": "0.12.4",
+ "homepage": "https://github.com/chalk/ansi-styles#readme",
+ "_id": "ansi-styles@2.2.1",
+ "_shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe",
+ "_from": "ansi-styles@>=2.2.1 <3.0.0",
+ "_npmVersion": "3.8.3",
+ "_nodeVersion": "4.3.0",
"_npmUser": {
- "name": "jbnicolai",
- "email": "jappelman@xebia.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
"dist": {
- "shasum": "990f747146927b559a932bf92959163d60c0d0e2",
- "tarball": "http://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz"
+ "shasum": "b432dd3358b634cf75e1e4664368240533c1ddbe",
+ "tarball": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/ansi-styles-2.2.1.tgz_1459197317833_0.9694824463222176"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.1.0.tgz",
+ "_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
index d0f6c0f9df51f4..7714d5d14aafdb 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/escape-string-regexp/package.json
@@ -1,6 +1,6 @@
{
"name": "escape-string-regexp",
- "version": "1.0.4",
+ "version": "1.0.5",
"description": "Escape RegExp special characters",
"license": "MIT",
"repository": {
@@ -47,25 +47,29 @@
"ava": "*",
"xo": "*"
},
- "gitHead": "e9ca6832a9506ca26402cb0e6dc95efcf35b0b97",
+ "gitHead": "db124a3e1aae9d692c4899e42a5c6c3e329eaa20",
"bugs": {
"url": "https://github.com/sindresorhus/escape-string-regexp/issues"
},
"homepage": "https://github.com/sindresorhus/escape-string-regexp",
- "_id": "escape-string-regexp@1.0.4",
- "_shasum": "b85e679b46f72d03fbbe8a3bf7259d535c21b62f",
+ "_id": "escape-string-regexp@1.0.5",
+ "_shasum": "1b61c0562190a8dff6ae3bb2cf0200ca130b86d4",
"_from": "escape-string-regexp@>=1.0.2 <2.0.0",
- "_npmVersion": "2.14.7",
- "_nodeVersion": "4.2.1",
+ "_npmVersion": "2.14.12",
+ "_nodeVersion": "4.2.6",
"_npmUser": {
- "name": "sindresorhus",
- "email": "sindresorhus@gmail.com"
+ "name": "jbnicolai",
+ "email": "jappelman@xebia.com"
},
"dist": {
- "shasum": "b85e679b46f72d03fbbe8a3bf7259d535c21b62f",
- "tarball": "http://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.4.tgz"
+ "shasum": "1b61c0562190a8dff6ae3bb2cf0200ca130b86d4",
+ "tarball": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-9-west.internal.npmjs.com",
+ "tmp": "tmp/escape-string-regexp-1.0.5.tgz_1456059312074_0.7245344955008477"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.4.tgz",
+ "_resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
index d39a62eb9150d7..443129da43458d 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/node_modules/has-ansi/package.json
@@ -77,7 +77,7 @@
},
"dist": {
"shasum": "34f5049ce1ecdf2b0649af3ef24e45ed35416d91",
- "tarball": "http://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz"
+ "tarball": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
index dc5e754dd7e049..d96edabd8fa8ca 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/chalk/package.json
@@ -1,6 +1,6 @@
{
"name": "chalk",
- "version": "1.1.1",
+ "version": "1.1.3",
"description": "Terminal string styling done right. Much color.",
"license": "MIT",
"repository": {
@@ -9,12 +9,12 @@
},
"maintainers": [
{
- "name": "sindresorhus",
- "email": "sindresorhus@gmail.com"
+ "name": "qix",
+ "email": "i.am.qix@gmail.com"
},
{
- "name": "jbnicolai",
- "email": "jappelman@xebia.com"
+ "name": "sindresorhus",
+ "email": "sindresorhus@gmail.com"
},
{
"name": "unicorn",
@@ -57,7 +57,7 @@
"text"
],
"dependencies": {
- "ansi-styles": "^2.1.0",
+ "ansi-styles": "^2.2.1",
"escape-string-regexp": "^1.0.2",
"has-ansi": "^2.0.0",
"strip-ansi": "^3.0.0",
@@ -79,25 +79,29 @@
"mocha"
]
},
- "gitHead": "8b554e254e89c85c1fd04dcc444beeb15824e1a5",
+ "gitHead": "0d8d8c204eb87a4038219131ad4d8369c9f59d24",
"bugs": {
"url": "https://github.com/chalk/chalk/issues"
},
"homepage": "https://github.com/chalk/chalk#readme",
- "_id": "chalk@1.1.1",
- "_shasum": "509afb67066e7499f7eb3535c77445772ae2d019",
+ "_id": "chalk@1.1.3",
+ "_shasum": "a8115c55e4a702fe4d150abd3872822a7e09fc98",
"_from": "chalk@>=1.1.1 <2.0.0",
- "_npmVersion": "2.13.5",
- "_nodeVersion": "0.12.7",
+ "_npmVersion": "2.14.2",
+ "_nodeVersion": "0.10.32",
"_npmUser": {
- "name": "sindresorhus",
- "email": "sindresorhus@gmail.com"
+ "name": "qix",
+ "email": "i.am.qix@gmail.com"
},
"dist": {
- "shasum": "509afb67066e7499f7eb3535c77445772ae2d019",
- "tarball": "http://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz"
+ "shasum": "a8115c55e4a702fe4d150abd3872822a7e09fc98",
+ "tarball": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/chalk-1.1.3.tgz_1459210604109_0.3892582862172276"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.1.tgz",
+ "_resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/package.json
index 15a0435ce405f2..8d4aa4e37c9333 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/commander/package.json
@@ -49,7 +49,7 @@
},
"dist": {
"shasum": "9c99094176e12240cb22d6c5146098400fe0f7d4",
- "tarball": "http://registry.npmjs.org/commander/-/commander-2.9.0.tgz"
+ "tarball": "https://registry.npmjs.org/commander/-/commander-2.9.0.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/README.md b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/README.md
index cbf2b20d336045..104a425ad204ed 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/README.md
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/README.md
@@ -40,7 +40,7 @@ console.log(validate.errors)
You can also pass the schema as a string
``` js
-var validate = validate('{"type": ... }')
+var validate = validator('{"type": ... }')
```
Optionally you can use the require submodule to load a schema from `__dirname`
@@ -128,7 +128,7 @@ var validate = validator({
})
validate({hello: 100});
-console.log(validate.errors) // {field: 'data.hello', message: 'is the wrong type', value: 100}
+console.log(validate.errors) // {field: 'data.hello', message: 'is the wrong type', value: 100, type: 'string'}
```
## Greedy mode tries to validate as much as possible
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
index f24db9b1c9062b..f929bb75394944 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/index.js
@@ -5,7 +5,6 @@ var xtend = require('xtend')
var formats = require('./formats')
var get = function(obj, additionalSchemas, ptr) {
- if (/^https?:\/\//.test(ptr)) return null
var visit = function(sub) {
if (sub && sub.id === ptr) return sub
@@ -98,13 +97,25 @@ var unique = function(array) {
return true
}
+var isMultipleOf = function(name, multipleOf) {
+ var res;
+ var factor = ((multipleOf | 0) !== multipleOf) ? Math.pow(10, multipleOf.toString().split('.').pop().length) : 1
+ if (factor > 1) {
+ var factorName = ((name | 0) !== name) ? Math.pow(10, name.toString().split('.').pop().length) : 1
+ if (factorName > factor) res = true
+ else res = Math.round(factor * name) % (factor * multipleOf)
+ }
+ else res = name % multipleOf;
+ return !res;
+}
+
var toType = function(node) {
return node.type
}
var compile = function(schema, cache, root, reporter, opts) {
var fmts = opts ? xtend(formats, opts.formats) : formats
- var scope = {unique:unique, formats:fmts}
+ var scope = {unique:unique, formats:fmts, isMultipleOf:isMultipleOf}
var verbose = opts ? !!opts.verbose : false;
var greedy = opts && opts.greedy !== undefined ?
opts.greedy : false;
@@ -150,7 +161,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (reporter === true) {
validate('if (validate.errors === null) validate.errors = []')
if (verbose) {
- validate('validate.errors.push({field:%s,message:%s,value:%s})', formatName(prop || name), JSON.stringify(msg), value || name)
+ validate('validate.errors.push({field:%s,message:%s,value:%s,type:%s})', formatName(prop || name), JSON.stringify(msg), value || name, JSON.stringify(type))
} else {
validate('validate.errors.push({field:%s,message:%s})', formatName(prop || name), JSON.stringify(msg))
}
@@ -434,9 +445,7 @@ var compile = function(schema, cache, root, reporter, opts) {
if (node.multipleOf !== undefined) {
if (type !== 'number' && type !== 'integer') validate('if (%s) {', types.number(name))
- var factor = ((node.multipleOf | 0) !== node.multipleOf) ? Math.pow(10, node.multipleOf.toString().split('.').pop().length) : 1
- if (factor > 1) validate('if ((%d*%s) % %d) {', factor, name, factor*node.multipleOf)
- else validate('if (%s % %d) {', name, node.multipleOf)
+ validate('if (!isMultipleOf(%s, %d)) {', name, node.multipleOf)
error('has a remainder')
validate('}')
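A quick illustration (not part of the patch) of why the isMultipleOf helper above scales by a power of ten before taking the remainder: a naive modulo on decimal fractions trips over floating-point representation.

```js
// naive check: fails for a value that is clearly a multiple of 0.01
console.log(2.1 % 0.01)   // 0.00999999999999... -> looks like "not a multiple"

// scaled check, as the patched isMultipleOf does: lift both operands to integers
var factor = 100          // 0.01 has two decimal digits -> scale by 10^2
console.log(Math.round(2.1 * factor) % (0.01 * factor))   // 0 -> multiple

// a value with more decimal digits than the divisor (e.g. 1.211 vs 0.01)
// is rejected up front by the helper without relying on the remainder
```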
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
index 4b0677650bcd69..4ec4bbb256f693 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/jsonpointer/package.json
@@ -56,7 +56,7 @@
],
"dist": {
"shasum": "3af1dd20fe85463910d469a385e33017d2a030d9",
- "tarball": "http://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz"
+ "tarball": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-2.0.0.tgz",
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
index 6f380f079b3ce9..08542c5ff93b0b 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/node_modules/xtend/package.json
@@ -70,7 +70,7 @@
},
"dist": {
"shasum": "a5c6d532be656e23db820efb943a1f04998d63af",
- "tarball": "http://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
+ "tarball": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/package.json
index 63ba1b87a74884..22739524cbf367 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/package.json
@@ -1,6 +1,6 @@
{
"name": "is-my-json-valid",
- "version": "2.12.4",
+ "version": "2.13.1",
"description": "A JSONSchema validator that uses code generation to be extremely fast",
"main": "index.js",
"dependencies": {
@@ -33,9 +33,9 @@
"url": "https://github.com/mafintosh/is-my-json-valid/issues"
},
"homepage": "https://github.com/mafintosh/is-my-json-valid",
- "gitHead": "8978aa8f40eef4ac47a5d18270c13abd48927ddb",
- "_id": "is-my-json-valid@2.12.4",
- "_shasum": "d4ed2bc1d7f88daf8d0f763b3e3e39a69bd37880",
+ "gitHead": "5bacc71441750bc6e79829abcfc21d4f2f0c4396",
+ "_id": "is-my-json-valid@2.13.1",
+ "_shasum": "d55778a82feb6b0963ff4be111d5d1684e890707",
"_from": "is-my-json-valid@>=2.12.4 <3.0.0",
"_npmVersion": "2.14.7",
"_nodeVersion": "4.2.3",
@@ -44,8 +44,8 @@
"email": "mathiasbuus@gmail.com"
},
"dist": {
- "shasum": "d4ed2bc1d7f88daf8d0f763b3e3e39a69bd37880",
- "tarball": "http://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.4.tgz"
+ "shasum": "d55778a82feb6b0963ff4be111d5d1684e890707",
+ "tarball": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.13.1.tgz"
},
"maintainers": [
{
@@ -65,7 +65,11 @@
"email": "i@yoshuawuyts.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-5-east.internal.npmjs.com",
+ "tmp": "tmp/is-my-json-valid-2.13.1.tgz_1456180270224_0.17748022079467773"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.12.4.tgz",
+ "_resolved": "https://registry.npmjs.org/is-my-json-valid/-/is-my-json-valid-2.13.1.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json
index ca3b7618053f49..c13b2670b9dd9a 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json
@@ -56,5 +56,41 @@
"valid": false
}
]
+ },
+ {
+ "description": "by decimal number where floating point precision is wrong",
+ "schema": {"multipleOf": 0.01},
+ "tests": [
+ {
+ "description": "Number 2 is multiple of 0.01",
+ "data": 2,
+ "valid": true
+ },
+ {
+ "description": "Number 2.1 is multiple of 0.01",
+ "data": 2.1,
+ "valid": true
+ },
+ {
+ "description": "Number 2.2 is multiple of 0.01",
+ "data": 2.2,
+ "valid": true
+ },
+ {
+ "description": "Number 2.3 is multiple of 0.01",
+ "data": 2.3,
+ "valid": true
+ },
+ {
+ "description": "Number 2.4 is multiple of 0.01",
+ "data": 2.4,
+ "valid": true
+ },
+ {
+ "description": "Number 1.211 is not multiple of 0.01",
+ "data": 1.211,
+ "valid": false
+ }
+ ]
}
]
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/misc.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/misc.js
index b5109e576f26b9..275f2ac72f3877 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/misc.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/is-my-json-valid/test/misc.js
@@ -98,6 +98,7 @@ tape('additional props', function(t) {
t.ok(validate({}))
t.notOk(validate({foo:'bar'}))
t.ok(validate.errors[0].value === 'data.foo', 'should output the property not allowed in verbose mode')
+ t.strictEqual(validate.errors[0].type, 'object', 'error object should contain the type')
t.end()
})
@@ -278,6 +279,22 @@ tape('external schemas', function(t) {
t.end()
})
+tape('external schema URIs', function(t) {
+ var ext = {type: 'string'}
+ var schema = {
+ required: true,
+ $ref: 'http://example.com/schemas/schemaURIs'
+ }
+
+ var opts = {schemas:{}};
+ opts.schemas['http://example.com/schemas/schemaURIs'] = ext;
+ var validate = validator(schema, opts)
+
+ t.ok(validate('hello string'), 'is a string')
+ t.notOk(validate(42), 'not a string')
+ t.end()
+})
+
tape('top-level external schema', function(t) {
var defs = {
"string": {
@@ -354,6 +371,7 @@ tape('verbose mode', function(t) {
t.ok(validate({hello: 'string'}), 'should be valid')
t.notOk(validate({hello: 100}), 'should not be valid')
t.strictEqual(validate.errors[0].value, 100, 'error object should contain the invalid value')
+ t.strictEqual(validate.errors[0].type, 'string', 'error object should contain the type')
t.end()
})
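Both new tests above map to user-visible behaviour in is-my-json-valid; a small sketch of each (illustrative only, the schema names are made up):

```js
var validator = require('is-my-json-valid')

// verbose errors now also carry the expected type
var validate = validator({
  type: 'object',
  properties: { hello: { required: true, type: 'string' } }
}, { verbose: true })

validate({ hello: 100 })
console.log(validate.errors[0])
// { field: 'data.hello', message: 'is the wrong type', value: 100, type: 'string' }

// $ref targets that look like absolute URIs can now be resolved
// from the schemas option instead of being skipped
var byUri = validator(
  { $ref: 'http://example.com/schemas/name' },
  { schemas: { 'http://example.com/schemas/name': { type: 'string' } } }
)
console.log(byUri('hello'), byUri(42)) // true false
```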
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/index.js b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/index.js
index cc64b902c80b4b..777377a1f777b1 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/index.js
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/index.js
@@ -1,3 +1,3 @@
'use strict';
-module.exports = global.Promise || require('pinkie');
+module.exports = typeof Promise === 'function' ? Promise : require('pinkie');
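The one-line change above makes pinkie-promise check for a Promise constructor in scope rather than reaching for global.Promise; usage stays the same (illustrative sketch):

```js
// ponyfill: native Promise when available, the pinkie implementation otherwise
var Promise = require('pinkie-promise');

new Promise(function (resolve) { resolve('unicorns'); })
  .then(console.log); // 'unicorns'
```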
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/package.json b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/package.json
index 6a885455c688fd..7e84fa5a0ea428 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/node_modules/pinkie-promise/package.json
@@ -1,6 +1,6 @@
{
"name": "pinkie-promise",
- "version": "2.0.0",
+ "version": "2.0.1",
"description": "ES2015 Promise ponyfill",
"license": "MIT",
"repository": {
@@ -35,23 +35,23 @@
"devDependencies": {
"mocha": "*"
},
- "gitHead": "f90fcae9838bcae7ae1ae4ebc9b29f11e5db4980",
+ "gitHead": "4a936c09c34ad591a25db93f1216d242de0d6184",
"bugs": {
"url": "https://github.com/floatdrop/pinkie-promise/issues"
},
"homepage": "https://github.com/floatdrop/pinkie-promise",
- "_id": "pinkie-promise@2.0.0",
- "_shasum": "4c83538de1f6e660c29e0a13446844f7a7e88259",
+ "_id": "pinkie-promise@2.0.1",
+ "_shasum": "2135d6dfa7a358c069ac9b178776288228450ffa",
"_from": "pinkie-promise@>=2.0.0 <3.0.0",
- "_npmVersion": "2.14.7",
- "_nodeVersion": "4.2.0",
+ "_npmVersion": "2.14.20",
+ "_nodeVersion": "4.4.1",
"_npmUser": {
"name": "floatdrop",
"email": "floatdrop@gmail.com"
},
"dist": {
- "shasum": "4c83538de1f6e660c29e0a13446844f7a7e88259",
- "tarball": "http://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.0.tgz"
+ "shasum": "2135d6dfa7a358c069ac9b178776288228450ffa",
+ "tarball": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz"
},
"maintainers": [
{
@@ -59,7 +59,11 @@
"email": "floatdrop@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/pinkie-promise-2.0.1.tgz_1460309839126_0.3422858319245279"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.0.tgz",
+ "_resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/har-validator/package.json b/deps/npm/node_modules/request/node_modules/har-validator/package.json
index f8b242fae89ce1..43483790259557 100644
--- a/deps/npm/node_modules/request/node_modules/har-validator/package.json
+++ b/deps/npm/node_modules/request/node_modules/har-validator/package.json
@@ -79,7 +79,7 @@
],
"dist": {
"shasum": "cdcbc08188265ad119b6a5a7c8ab70eecfb5d27d",
- "tarball": "http://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz"
+ "tarball": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/har-validator/-/har-validator-2.0.6.tgz",
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
index 4c5b3866b0185b..9a9fa164f3daf0 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/boom/package.json
@@ -56,7 +56,7 @@
],
"dist": {
"shasum": "39c8918ceff5799f83f9492a848f625add0c766f",
- "tarball": "http://registry.npmjs.org/boom/-/boom-2.10.1.tgz"
+ "tarball": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz"
},
"directories": {},
"_resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz",
diff --git a/deps/npm/node_modules/request/node_modules/hawk/node_modules/hoek/package.json b/deps/npm/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
index 4e3968f48bc998..ef22487ceb3821 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/node_modules/hoek/package.json
@@ -39,7 +39,7 @@
},
"dist": {
"shasum": "20bb7403d3cea398e91dc4710a8ff1b8274a25ed",
- "tarball": "http://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz"
+ "tarball": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/hawk/package.json b/deps/npm/node_modules/request/node_modules/hawk/package.json
index 7ed5f0fe857bcc..0882fa80c5c72c 100644
--- a/deps/npm/node_modules/request/node_modules/hawk/package.json
+++ b/deps/npm/node_modules/request/node_modules/hawk/package.json
@@ -45,7 +45,7 @@
"homepage": "https://github.com/hueniverse/hawk#readme",
"_id": "hawk@3.1.3",
"_shasum": "078444bd7c1640b0fe540d2c9b73d59678e8e1c4",
- "_from": "hawk@>=3.1.0 <3.2.0",
+ "_from": "hawk@>=3.1.3 <3.2.0",
"_npmVersion": "3.3.12",
"_nodeVersion": "5.4.1",
"_npmUser": {
@@ -54,7 +54,7 @@
},
"dist": {
"shasum": "078444bd7c1640b0fe540d2c9b73d59678e8e1c4",
- "tarball": "http://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz"
+ "tarball": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/json-schema/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/json-schema/package.json
index 99d230debfd022..bce830a65e367a 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/json-schema/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/json-schema/package.json
@@ -43,7 +43,7 @@
"_id": "json-schema@0.2.2",
"dist": {
"shasum": "50354f19f603917c695f70b85afa77c3b0f23506",
- "tarball": "http://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz"
+ "tarball": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz"
},
"_npmVersion": "1.1.59",
"_npmUser": {
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/verror/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/verror/package.json
index 569d9998321ac2..20a35ea44896bf 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/verror/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/jsprim/node_modules/verror/package.json
@@ -19,7 +19,7 @@
"_id": "verror@1.3.6",
"dist": {
"shasum": "cff5df12946d297d2baaefaa2689e25be01c005c",
- "tarball": "http://registry.npmjs.org/verror/-/verror-1.3.6.tgz"
+ "tarball": "https://registry.npmjs.org/verror/-/verror-1.3.6.tgz"
},
"_npmVersion": "1.1.65",
"_npmUser": {
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/.travis.yml b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/.travis.yml
index 54692343d99003..c3394c258fc2aa 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/.travis.yml
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/.travis.yml
@@ -1,11 +1,11 @@
language: node_js
node_js:
- - "4.2"
+ - "5.10"
+ - "4.4"
- "4.1"
- "0.12"
- "0.10"
- - "0.8"
before_install:
- "make check"
after_success:
- - '[ "${TRAVIS_NODE_VERSION}" = "4.2" ] && make codecovio'
+ - '[ "${TRAVIS_NODE_VERSION}" = "4.4" ] && make codecovio'
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/README.md b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/README.md
index bc34d48a49fd3d..403f6ac89803b4 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/README.md
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/README.md
@@ -111,7 +111,7 @@ Usage
## Public keys
-### `parseKey(data[, format = 'auto'[, name]])`
+### `parseKey(data[, format = 'auto'[, options]])`
Parses a key from a given data format and returns a new `Key` object.
@@ -126,8 +126,12 @@ Parameters
- `rfc4253`: raw OpenSSH wire format
- `openssh`: new post-OpenSSH 6.5 internal format, produced by
`ssh-keygen -o`
-- `name` -- Optional name for the key being parsed (eg. the filename that
- was opened). Used to generate Error messages
+- `options` -- Optional Object, extra options, with keys:
+ - `filename` -- Optional String, name for the key being parsed
+ (eg. the filename that was opened). Used to generate
+ Error messages
+ - `passphrase` -- Optional String, encryption passphrase used to decrypt an
+ encrypted PEM file
### `Key.isKey(obj)`
@@ -212,7 +216,7 @@ to call this function on other keys will yield an `Error`.
## Private keys
-### `parsePrivateKey(data[, format = 'auto'[, name]])`
+### `parsePrivateKey(data[, format = 'auto'[, options]])`
Parses a private key from a given data format and returns a new
`PrivateKey` object.
@@ -227,8 +231,12 @@ Parameters
`ssh-keygen -o`
- `pkcs1`, `pkcs8`: variants of `pem`
- `rfc4253`: raw OpenSSH wire format
-- `name` -- Optional name for the key being parsed (eg. the filename that
- was opened). Used to generate Error messages
+- `options` -- Optional Object, extra options, with keys:
+ - `filename` -- Optional String, name for the key being parsed
+ (eg. the filename that was opened). Used to generate
+ Error messages
+ - `passphrase` -- Optional String, encryption passphrase used to decrypt an
+ encrypted PEM file
### `PrivateKey.isPrivateKey(obj)`
@@ -425,10 +433,21 @@ The key data given could not be parsed as a valid key.
Properties
-- `keyName` -- `name` that was given to `Key#parse`
+- `keyName` -- `filename` that was given to `Key#parse`
- `format` -- the `format` that was trying to parse the key
- `innerErr` -- the inner Error thrown by the format parser
+### `KeyEncryptedError`
+
+The key is encrypted with a symmetric key (ie, it is password protected). The
+parsing operation would succeed if it was given the `passphrase` option.
+
+Properties
+
+- `keyName` -- `filename` that was given to `Key#parse`
+- `format` -- the `format` that was trying to parse the key (currently can only
+ be `"pem"`)
+
Friends of sshpk
----------------
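Since the README hunk above documents the new options object and the KeyEncryptedError, here is a short illustrative sketch of parsing a passphrase-protected key (not part of the patch; path and passphrase are placeholders):

```js
var fs = require('fs');
var sshpk = require('sshpk');

var data = fs.readFileSync('/path/to/id_rsa');

var key;
try {
	// filename only improves error messages; no passphrase supplied yet
	key = sshpk.parsePrivateKey(data, 'auto', { filename: 'id_rsa' });
} catch (e) {
	if (e.name !== 'KeyEncryptedError')
		throw (e);
	// the key is password-protected: retry with the passphrase option
	key = sshpk.parsePrivateKey(data, 'pem', {
		filename: 'id_rsa',
		passphrase: 'correct horse battery staple'
	});
}
console.log(key.type, key.toPublic().toString('ssh'));
```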
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-conv b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-conv
index 8eec411ffb8cb1..a1205a45d5431f 100755
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-conv
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-conv
@@ -7,6 +7,9 @@ var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
+var tty = require('tty');
+var readline = require('readline');
+var getPassword = require('getpass').getPass;
var options = [
{
@@ -131,7 +134,9 @@ if (require.main === module) {
while ((data = inFile.read()))
bufs.push(data);
});
- inFile.on('end', function () {
+ var parseOpts = {};
+ parseOpts.filename = inFileName;
+ inFile.on('end', function processKey() {
var buf = Buffer.concat(bufs);
var fmt = 'auto';
if (opts.informat)
@@ -140,8 +145,15 @@ if (require.main === module) {
if (opts.private)
f = sshpk.parsePrivateKey;
try {
- var key = f(buf, fmt, inFileName);
+ var key = f(buf, fmt, parseOpts);
} catch (e) {
+ if (e.name === 'KeyEncryptedError') {
+ getPassword(function (err, pw) {
+ parseOpts.passphrase = pw;
+ processKey();
+ });
+ return;
+ }
console.error('sshpk-conv: ' +
e.name + ': ' + e.message);
process.exit(1);
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-sign b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-sign
index c8b91932aa62ee..673fc9864214db 100755
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-sign
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/bin/sshpk-sign
@@ -7,6 +7,7 @@ var dashdash = require('dashdash');
var sshpk = require('../lib/index');
var fs = require('fs');
var path = require('path');
+var getPassword = require('getpass').getPass;
var options = [
{
@@ -51,6 +52,8 @@ var options = [
}
];
+var parseOpts = {};
+
if (require.main === module) {
var parser = dashdash.createParser({
options: options
@@ -79,11 +82,23 @@ if (require.main === module) {
}
var keyData = fs.readFileSync(opts.identity);
+ parseOpts.filename = opts.identity;
+
+ run();
+}
+function run() {
var key;
try {
- key = sshpk.parsePrivateKey(keyData);
+ key = sshpk.parsePrivateKey(keyData, 'auto', parseOpts);
} catch (e) {
+ if (e.name === 'KeyEncryptedError') {
+ getPassword(function (err, pw) {
+ parseOpts.passphrase = pw;
+ run();
+ });
+ return;
+ }
console.error('sshpk-sign: error loading private key "' +
opts.identity + '": ' + e.name + ': ' + e.message);
process.exit(1);
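Both CLI changes above share the same shape: parse, catch KeyEncryptedError, prompt with getpass, retry. A condensed sketch of that loop (illustrative, not part of the patch):

```js
var getPassword = require('getpass').getPass;
var sshpk = require('sshpk');

function loadKey(buf, parseOpts, cb) {
	var key;
	try {
		key = sshpk.parsePrivateKey(buf, 'auto', parseOpts);
	} catch (e) {
		if (e.name === 'KeyEncryptedError') {
			// ask for the passphrase on the terminal, then try again
			getPassword(function (err, pw) {
				if (err)
					return (cb(err));
				parseOpts.passphrase = pw;
				loadKey(buf, parseOpts, cb);
			});
			return;
		}
		return (cb(e));
	}
	cb(null, key);
}
```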
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/errors.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/errors.js
index 3551c1071e27ad..d984f1ac3f66eb 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/errors.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/errors.js
@@ -50,9 +50,22 @@ function SignatureParseError(type, format, innerErr) {
}
util.inherits(SignatureParseError, Error);
+function KeyEncryptedError(name, format) {
+ if (Error.captureStackTrace)
+ Error.captureStackTrace(this, KeyEncryptedError);
+ this.name = 'KeyEncryptedError';
+ this.format = format;
+ this.keyName = name;
+ this.message = 'The ' + format + ' format key ' + name + ' is ' +
+ 'encrypted (password-protected), and no passphrase was ' +
+ 'provided in `options`';
+}
+util.inherits(KeyEncryptedError, Error);
+
module.exports = {
FingerprintFormatError: FingerprintFormatError,
InvalidAlgorithmError: InvalidAlgorithmError,
KeyParseError: KeyParseError,
- SignatureParseError: SignatureParseError
+ SignatureParseError: SignatureParseError,
+ KeyEncryptedError: KeyEncryptedError
};
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/auto.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/auto.js
index 37c3cc81356676..973c03245e3807 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/auto.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/auto.js
@@ -14,24 +14,24 @@ var pem = require('./pem');
var ssh = require('./ssh');
var rfc4253 = require('./rfc4253');
-function read(buf) {
+function read(buf, options) {
if (typeof (buf) === 'string') {
if (buf.trim().match(/^[-]+[ ]*BEGIN/))
- return (pem.read(buf));
+ return (pem.read(buf, options));
if (buf.match(/^\s*ssh-[a-z]/))
- return (ssh.read(buf));
+ return (ssh.read(buf, options));
if (buf.match(/^\s*ecdsa-/))
- return (ssh.read(buf));
+ return (ssh.read(buf, options));
buf = new Buffer(buf, 'binary');
} else {
assert.buffer(buf);
if (findPEMHeader(buf))
- return (pem.read(buf));
+ return (pem.read(buf, options));
if (findSSHHeader(buf))
- return (ssh.read(buf));
+ return (ssh.read(buf, options));
}
if (buf.readUInt32BE(0) < buf.length)
- return (rfc4253.read(buf));
+ return (rfc4253.read(buf, options));
throw (new Error('Failed to auto-detect format of key'));
}
@@ -68,6 +68,6 @@ function findPEMHeader(buf) {
return (true);
}
-function write(key) {
+function write(key, options) {
throw (new Error('"auto" format cannot be used for writing'));
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pem.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pem.js
index 1d907607e29dc3..5318b35165336d 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pem.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pem.js
@@ -7,6 +7,7 @@ module.exports = {
var assert = require('assert-plus');
var asn1 = require('asn1');
+var crypto = require('crypto');
var algs = require('../algs');
var utils = require('../utils');
var Key = require('../key');
@@ -17,11 +18,13 @@ var pkcs8 = require('./pkcs8');
var sshpriv = require('./ssh-private');
var rfc4253 = require('./rfc4253');
+var errors = require('../errors');
+
/*
* For reading we support both PKCS#1 and PKCS#8. If we find a private key,
* we just take the public component of it and use that.
*/
-function read(buf, forceType) {
+function read(buf, options, forceType) {
var input = buf;
if (typeof (buf) !== 'string') {
assert.buffer(buf, 'buf');
@@ -58,12 +61,26 @@ function read(buf, forceType) {
break;
headers[m[1].toLowerCase()] = m[2];
}
+
+ var cipher, key, iv;
if (headers['proc-type']) {
var parts = headers['proc-type'].split(',');
if (parts[0] === '4' && parts[1] === 'ENCRYPTED') {
- throw (new Error('PEM key is encrypted ' +
- '(password-protected). Please use the ' +
- 'SSH agent or decrypt the key.'));
+ if (typeof (options.passphrase) === 'string') {
+ options.passphrase = new Buffer(
+ options.passphrase, 'utf-8');
+ }
+ if (!Buffer.isBuffer(options.passphrase)) {
+ throw (new errors.KeyEncryptedError(
+ options.filename, 'PEM'));
+ } else {
+ parts = headers['dek-info'].split(',');
+ assert.ok(parts.length === 2);
+ cipher = parts[0].toLowerCase();
+ iv = new Buffer(parts[1], 'hex');
+ key = utils.opensslKeyDeriv(cipher, iv,
+ options.passphrase, 1).key;
+ }
}
}
@@ -71,6 +88,23 @@ function read(buf, forceType) {
lines = lines.slice(0, -1).join('');
buf = new Buffer(lines, 'base64');
+ if (cipher && key && iv) {
+ var cipherStream = crypto.createDecipheriv(cipher, key, iv);
+ var chunk, chunks = [];
+ cipherStream.once('error', function (e) {
+ if (e.toString().indexOf('bad decrypt') !== -1) {
+ throw (new Error('Incorrect passphrase ' +
+ 'supplied, could not decrypt key'));
+ }
+ throw (e);
+ });
+ cipherStream.write(buf);
+ cipherStream.end();
+ while ((chunk = cipherStream.read()) !== null)
+ chunks.push(chunk);
+ buf = Buffer.concat(chunks);
+ }
+
/* The new OpenSSH internal format abuses PEM headers */
if (alg && alg.toLowerCase() === 'openssh')
return (sshpriv.readSSHPrivate(type, buf));
@@ -98,7 +132,7 @@ function read(buf, forceType) {
}
}
-function write(key, type) {
+function write(key, options, type) {
assert.object(key);
var alg = {'ecdsa': 'EC', 'rsa': 'RSA', 'dsa': 'DSA'}[key.type];
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs1.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs1.js
index 6f9a24f115042b..a5676af6ef8420 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs1.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs1.js
@@ -19,12 +19,12 @@ var pem = require('./pem');
var pkcs8 = require('./pkcs8');
var readECDSACurve = pkcs8.readECDSACurve;
-function read(buf) {
- return (pem.read(buf, 'pkcs1'));
+function read(buf, options) {
+ return (pem.read(buf, options, 'pkcs1'));
}
-function write(key) {
- return (pem.write(key, 'pkcs1'));
+function write(key, options) {
+ return (pem.write(key, options, 'pkcs1'));
}
/* Helper to read in a single mpint */
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs8.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs8.js
index c57c85537a72ca..33fb7cc475923d 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs8.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/pkcs8.js
@@ -18,12 +18,12 @@ var Key = require('../key');
var PrivateKey = require('../private-key');
var pem = require('./pem');
-function read(buf) {
- return (pem.read(buf, 'pkcs8'));
+function read(buf, options) {
+ return (pem.read(buf, options, 'pkcs8'));
}
-function write(key) {
- return (pem.write(key, 'pkcs8'));
+function write(key, options) {
+ return (pem.write(key, options, 'pkcs8'));
}
/* Helper to read in a single mpint */
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/rfc4253.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/rfc4253.js
index 94e6a907d4f0ef..9d436dd92155f6 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/rfc4253.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/rfc4253.js
@@ -52,7 +52,7 @@ function keyTypeToAlg(key) {
throw (new Error('Unknown key type ' + key.type));
}
-function read(partial, type, buf) {
+function read(partial, type, buf, options) {
if (typeof (buf) === 'string')
buf = new Buffer(buf);
assert.buffer(buf, 'buf');
@@ -120,7 +120,7 @@ function read(partial, type, buf) {
return (new Constructor(key));
}
-function write(key) {
+function write(key, options) {
assert.object(key);
var alg = keyTypeToAlg(key);
@@ -137,7 +137,8 @@ function write(key) {
for (i = 0; i < parts.length; ++i) {
var data = key.part[parts[i]].data;
- data = utils.mpNormalize(data);
+ if (algInfo.normalize !== false)
+ data = utils.mpNormalize(data);
buf.writeBuffer(data);
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh-private.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh-private.js
index 02d7c27e8f33dd..bfbdab527f9b82 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh-private.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh-private.js
@@ -18,8 +18,8 @@ var pem = require('./pem');
var rfc4253 = require('./rfc4253');
var SSHBuffer = require('../ssh-buffer');
-function read(buf) {
- return (pem.read(buf));
+function read(buf, options) {
+ return (pem.read(buf, options));
}
var MAGIC = 'openssh-key-v1';
@@ -76,7 +76,7 @@ function readSSHPrivate(type, buf) {
return (key);
}
-function write(key) {
+function write(key, options) {
var pubKey;
if (PrivateKey.isPrivateKey(key))
pubKey = key.toPublic();
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh.js
index f7baa48cfea2f4..655c9eaf3bf7c9 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/formats/ssh.js
@@ -18,7 +18,7 @@ var SSHKEY_RE = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/]+[=]*)([\n \t]+([^\n]+))?$/;
/*JSSTYLED*/
var SSHKEY_RE2 = /^([a-z0-9-]+)[ \t]+([a-zA-Z0-9+\/ \t\n]+[=]*)(.*)$/;
-function read(buf) {
+function read(buf, options) {
if (typeof (buf) !== 'string') {
assert.buffer(buf, 'buf');
buf = buf.toString('ascii');
@@ -95,7 +95,7 @@ function read(buf) {
return (key);
}
-function write(key) {
+function write(key, options) {
assert.object(key);
if (!Key.isKey(key))
throw (new Error('Must be a public key'));
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/index.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/index.js
index 5cd962b31257fc..0b40429c95c21f 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/index.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/index.js
@@ -21,5 +21,6 @@ module.exports = {
FingerprintFormatError: errs.FingerprintFormatError,
InvalidAlgorithmError: errs.InvalidAlgorithmError,
KeyParseError: errs.KeyParseError,
- SignatureParseError: errs.SignatureParseError
+ SignatureParseError: errs.SignatureParseError,
+ KeyEncryptedError: errs.KeyEncryptedError
};
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/key.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/key.js
index d140f0e54d8bcd..edc5143426a122 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/key.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/key.js
@@ -77,11 +77,12 @@ function Key(opts) {
Key.formats = formats;
-Key.prototype.toBuffer = function (format) {
+Key.prototype.toBuffer = function (format, options) {
if (format === undefined)
format = 'ssh';
assert.string(format, 'format');
assert.object(formats[format], 'formats[format]');
+ assert.optionalObject(options, 'options');
if (format === 'rfc4253') {
if (this._rfc4253Cache === undefined)
@@ -89,11 +90,11 @@ Key.prototype.toBuffer = function (format) {
return (this._rfc4253Cache);
}
- return (formats[format].write(this));
+ return (formats[format].write(this, options));
};
-Key.prototype.toString = function (format) {
- return (this.toBuffer(format).toString());
+Key.prototype.toString = function (format, options) {
+ return (this.toBuffer(format, options).toString());
};
Key.prototype.hash = function (algo) {
@@ -107,9 +108,6 @@ Key.prototype.hash = function (algo) {
var hash = crypto.createHash(algo).
update(this.toBuffer('rfc4253')).digest();
- /* Workaround for node 0.8 */
- if (typeof (hash) === 'string')
- hash = new Buffer(hash, 'binary');
this._hashCache[algo] = hash;
return (hash);
};
@@ -158,10 +156,7 @@ Key.prototype.createVerify = function (hashAlgo) {
var v, nm, err;
try {
- nm = this.type.toUpperCase() + '-';
- if (this.type === 'ecdsa')
- nm = 'ecdsa-with-';
- nm += hashAlgo.toUpperCase();
+ nm = hashAlgo.toUpperCase();
v = crypto.createVerify(nm);
} catch (e) {
err = e;
@@ -213,26 +208,34 @@ Key.prototype.createDiffieHellman = function () {
};
Key.prototype.createDH = Key.prototype.createDiffieHellman;
-Key.parse = function (data, format, name) {
+Key.parse = function (data, format, options) {
if (typeof (data) !== 'string')
assert.buffer(data, 'data');
if (format === undefined)
format = 'auto';
assert.string(format, 'format');
- if (name === undefined)
- name = '(unnamed)';
+ if (typeof (options) === 'string')
+ options = { filename: options };
+ assert.optionalObject(options, 'options');
+ if (options === undefined)
+ options = {};
+ assert.optionalString(options.filename, 'options.filename');
+ if (options.filename === undefined)
+ options.filename = '(unnamed)';
assert.object(formats[format], 'formats[format]');
try {
- var k = formats[format].read(data);
+ var k = formats[format].read(data, options);
if (k instanceof PrivateKey)
k = k.toPublic();
if (!k.comment)
- k.comment = name;
+ k.comment = options.filename;
return (k);
} catch (e) {
- throw (new KeyParseError(name, format, e));
+ if (e.name === 'KeyEncryptedError')
+ throw (e);
+ throw (new KeyParseError(options.filename, format, e));
}
};
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/private-key.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/private-key.js
index 993ae32650237a..f80d93966286b5 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/private-key.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/private-key.js
@@ -23,6 +23,7 @@ var Key = require('./key');
var InvalidAlgorithmError = errs.InvalidAlgorithmError;
var KeyParseError = errs.KeyParseError;
+var KeyEncryptedError = errs.KeyEncryptedError;
var formats = {};
formats['auto'] = require('./formats/auto');
@@ -44,13 +45,14 @@ util.inherits(PrivateKey, Key);
PrivateKey.formats = formats;
-PrivateKey.prototype.toBuffer = function (format) {
+PrivateKey.prototype.toBuffer = function (format, options) {
if (format === undefined)
format = 'pkcs1';
assert.string(format, 'format');
assert.object(formats[format], 'formats[format]');
+ assert.optionalObject(options, 'options');
- return (formats[format].write(this));
+ return (formats[format].write(this, options));
};
PrivateKey.prototype.hash = function (algo) {
@@ -146,10 +148,7 @@ PrivateKey.prototype.createSign = function (hashAlgo) {
var v, nm, err;
try {
- nm = this.type.toUpperCase() + '-';
- if (this.type === 'ecdsa')
- nm = 'ecdsa-with-';
- nm += hashAlgo.toUpperCase();
+ nm = hashAlgo.toUpperCase();
v = crypto.createSign(nm);
} catch (e) {
err = e;
@@ -175,25 +174,33 @@ PrivateKey.prototype.createSign = function (hashAlgo) {
return (v);
};
-PrivateKey.parse = function (data, format, name) {
+PrivateKey.parse = function (data, format, options) {
if (typeof (data) !== 'string')
assert.buffer(data, 'data');
if (format === undefined)
format = 'auto';
assert.string(format, 'format');
- if (name === undefined)
- name = '(unnamed)';
+ if (typeof (options) === 'string')
+ options = { filename: options };
+ assert.optionalObject(options, 'options');
+ if (options === undefined)
+ options = {};
+ assert.optionalString(options.filename, 'options.filename');
+ if (options.filename === undefined)
+ options.filename = '(unnamed)';
assert.object(formats[format], 'formats[format]');
try {
- var k = formats[format].read(data);
+ var k = formats[format].read(data, options);
assert.ok(k instanceof PrivateKey, 'key is not a private key');
if (!k.comment)
- k.comment = name;
+ k.comment = options.filename;
return (k);
} catch (e) {
- throw (new KeyParseError(name, format, e));
+ if (e.name === 'KeyEncryptedError')
+ throw (e);
+ throw (new KeyParseError(options.filename, format, e));
}
};
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/utils.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/utils.js
index 34c22d31a6cf11..d57245cc16b41d 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/utils.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/lib/utils.js
@@ -8,11 +8,13 @@ module.exports = {
ecNormalize: ecNormalize,
countZeros: countZeros,
assertCompatible: assertCompatible,
- isCompatible: isCompatible
+ isCompatible: isCompatible,
+ opensslKeyDeriv: opensslKeyDeriv
};
var assert = require('assert-plus');
var PrivateKey = require('./private-key');
+var crypto = require('crypto');
var MAX_CLASS_DEPTH = 3;
@@ -68,6 +70,43 @@ function assertCompatible(obj, klass, needVer, name) {
'version ' + needVer[0] + '.' + needVer[1]);
}
+var CIPHER_LEN = {
+ 'des-ede3-cbc': { key: 7, iv: 8 },
+ 'aes-128-cbc': { key: 16, iv: 16 }
+};
+var PKCS5_SALT_LEN = 8;
+
+function opensslKeyDeriv(cipher, salt, passphrase, count) {
+ assert.buffer(salt, 'salt');
+ assert.buffer(passphrase, 'passphrase');
+ assert.number(count, 'iteration count');
+
+ var clen = CIPHER_LEN[cipher];
+ assert.object(clen, 'supported cipher');
+
+ salt = salt.slice(0, PKCS5_SALT_LEN);
+
+ var D, D_prev, bufs;
+ var material = new Buffer(0);
+ while (material.length < clen.key + clen.iv) {
+ bufs = [];
+ if (D_prev)
+ bufs.push(D_prev);
+ bufs.push(passphrase);
+ bufs.push(salt);
+ D = Buffer.concat(bufs);
+ for (var j = 0; j < count; ++j)
+ D = crypto.createHash('md5').update(D).digest();
+ material = Buffer.concat([material, D]);
+ D_prev = D;
+ }
+
+ return ({
+ key: material.slice(0, clen.key),
+ iv: material.slice(clen.key, clen.key + clen.iv)
+ });
+}
+
/* Count leading zero bits on a buffer */
function countZeros(buf) {
var o = 0, obit = 8;
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/asn1/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/asn1/package.json
index 0e1bcc2da7ee81..fa0ac24de61e39 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/asn1/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/asn1/package.json
@@ -58,7 +58,7 @@
],
"dist": {
"shasum": "dac8787713c9966849fc8180777ebe9c1ddf3b86",
- "tarball": "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"
+ "tarball": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz"
},
"directories": {},
"readme": "ERROR: No README data found!"
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/AUTHORS b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/AUTHORS
new file mode 100644
index 00000000000000..1923524fe40ddb
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/AUTHORS
@@ -0,0 +1,6 @@
+Dave Eddy <dave@daveeddy.com>
+Fred Kuo <fred.kuo@joyent.com>
+Lars-Magnus Skog <ralphtheninja@riseup.net>
+Mark Cavage <mcavage@gmail.com>
+Patrick Mooney <pmooney@pfmooney.com>
+Rob Gulewich <robert.gulewich@joyent.com>
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/CHANGES.md b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/CHANGES.md
new file mode 100644
index 00000000000000..57d92bfdb9dae0
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/CHANGES.md
@@ -0,0 +1,14 @@
+# assert-plus Changelog
+
+## 1.0.0
+
+- *BREAKING* assert.number (and derivatives) now accept Infinity as valid input
+- Add assert.finite check. Previous assert.number callers should use this if
+ they expect Infinity inputs to throw.
+
+## 0.2.0
+
+- Fix `assert.object(null)` so it throws
+- Fix optional/arrayOf exports for non-type-of asserts
+- Add optional/arrayOf exports for Stream/Date/Regex/uuid
+- Add basic unit test coverage
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/README.md b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/README.md
new file mode 100644
index 00000000000000..ec200d161efc93
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/README.md
@@ -0,0 +1,162 @@
+# assert-plus
+
+This library is a super small wrapper over node's assert module that has two
+things: (1) the ability to disable assertions with the environment variable
+NODE\_NDEBUG, and (2) some API wrappers for argument testing. Like
+`assert.string(myArg, 'myArg')`. As a simple example, most of my code looks
+like this:
+
+```javascript
+ var assert = require('assert-plus');
+
+ function fooAccount(options, callback) {
+ assert.object(options, 'options');
+ assert.number(options.id, 'options.id');
+ assert.bool(options.isManager, 'options.isManager');
+ assert.string(options.name, 'options.name');
+ assert.arrayOfString(options.email, 'options.email');
+ assert.func(callback, 'callback');
+
+ // Do stuff
+ callback(null, {});
+ }
+```
+
+# API
+
+All methods that *aren't* part of node's core assert API are simply assumed to
+take an argument, and then a string 'name' that's not a message; `AssertionError`
+will be thrown if the assertion fails with a message like:
+
+ AssertionError: foo (string) is required
+ at test (/home/mark/work/foo/foo.js:3:9)
+ at Object.<anonymous> (/home/mark/work/foo/foo.js:15:1)
+ at Module._compile (module.js:446:26)
+ at Object..js (module.js:464:10)
+ at Module.load (module.js:353:31)
+ at Function._load (module.js:311:12)
+ at Array.0 (module.js:484:10)
+ at EventEmitter._tickCallback (node.js:190:38)
+
+from:
+
+```javascript
+ function test(foo) {
+ assert.string(foo, 'foo');
+ }
+```
+
+There you go. You can check that arrays are of a homogeneous type with `arrayOf$Type`:
+
+```javascript
+ function test(foo) {
+ assert.arrayOfString(foo, 'foo');
+ }
+```
+
+You can assert IFF an argument is not `undefined` (i.e., an optional arg):
+
+```javascript
+ assert.optionalString(foo, 'foo');
+```
+
+Lastly, you can opt-out of assertion checking altogether by setting the
+environment variable `NODE_NDEBUG=1`. This is pseudo-useful if you have
+lots of assertions, and don't want to pay `typeof ()` taxes to v8 in
+production. Be advised: The standard functions re-exported from `assert` are
+also disabled in assert-plus if NDEBUG is specified. Using them directly from
+the `assert` module avoids this behavior.
+
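+For instance, here is a small sketch (not part of the original README) of what
+the flag changes at runtime; `app.js` is a hypothetical entry point:
+
+```javascript
+// app.js -- run as `node app.js` vs. `NODE_NDEBUG=1 node app.js`
+var assert = require('assert-plus');
+
+function listen(port) {
+    // Throws AssertionError('port (number) is required') normally;
+    // becomes a no-op when NODE_NDEBUG is set in the environment.
+    assert.number(port, 'port');
+}
+
+listen('8080');
+```
+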
+The complete list of APIs is:
+
+* assert.array
+* assert.bool
+* assert.buffer
+* assert.func
+* assert.number
+* assert.finite
+* assert.object
+* assert.string
+* assert.stream
+* assert.date
+* assert.regexp
+* assert.uuid
+* assert.arrayOfArray
+* assert.arrayOfBool
+* assert.arrayOfBuffer
+* assert.arrayOfFunc
+* assert.arrayOfNumber
+* assert.arrayOfFinite
+* assert.arrayOfObject
+* assert.arrayOfString
+* assert.arrayOfStream
+* assert.arrayOfDate
+* assert.arrayOfRegexp
+* assert.arrayOfUuid
+* assert.optionalArray
+* assert.optionalBool
+* assert.optionalBuffer
+* assert.optionalFunc
+* assert.optionalNumber
+* assert.optionalFinite
+* assert.optionalObject
+* assert.optionalString
+* assert.optionalStream
+* assert.optionalDate
+* assert.optionalRegexp
+* assert.optionalUuid
+* assert.optionalArrayOfArray
+* assert.optionalArrayOfBool
+* assert.optionalArrayOfBuffer
+* assert.optionalArrayOfFunc
+* assert.optionalArrayOfNumber
+* assert.optionalArrayOfFinite
+* assert.optionalArrayOfObject
+* assert.optionalArrayOfString
+* assert.optionalArrayOfStream
+* assert.optionalArrayOfDate
+* assert.optionalArrayOfRegexp
+* assert.optionalArrayOfUuid
+* assert.AssertionError
+* assert.fail
+* assert.ok
+* assert.equal
+* assert.notEqual
+* assert.deepEqual
+* assert.notDeepEqual
+* assert.strictEqual
+* assert.notStrictEqual
+* assert.throws
+* assert.doesNotThrow
+* assert.ifError
+
+# Installation
+
+ npm install assert-plus
+
+## License
+
+The MIT License (MIT)
+Copyright (c) 2012 Mark Cavage
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+## Bugs
+
+See https://github.com/mcavage/node-assert-plus/issues.
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/assert.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/assert.js
new file mode 100644
index 00000000000000..26f944eec307a0
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/assert.js
@@ -0,0 +1,211 @@
+// Copyright (c) 2012, Mark Cavage. All rights reserved.
+// Copyright 2015 Joyent, Inc.
+
+var assert = require('assert');
+var Stream = require('stream').Stream;
+var util = require('util');
+
+
+///--- Globals
+
+/* JSSTYLED */
+var UUID_REGEXP = /^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/;
+
+
+///--- Internal
+
+function _capitalize(str) {
+ return (str.charAt(0).toUpperCase() + str.slice(1));
+}
+
+function _toss(name, expected, oper, arg, actual) {
+ throw new assert.AssertionError({
+ message: util.format('%s (%s) is required', name, expected),
+ actual: (actual === undefined) ? typeof (arg) : actual(arg),
+ expected: expected,
+ operator: oper || '===',
+ stackStartFunction: _toss.caller
+ });
+}
+
+function _getClass(arg) {
+ return (Object.prototype.toString.call(arg).slice(8, -1));
+}
+
+function noop() {
+ // Why even bother with asserts?
+}
+
+
+///--- Exports
+
+var types = {
+ bool: {
+ check: function (arg) { return typeof (arg) === 'boolean'; }
+ },
+ func: {
+ check: function (arg) { return typeof (arg) === 'function'; }
+ },
+ string: {
+ check: function (arg) { return typeof (arg) === 'string'; }
+ },
+ object: {
+ check: function (arg) {
+ return typeof (arg) === 'object' && arg !== null;
+ }
+ },
+ number: {
+ check: function (arg) {
+ return typeof (arg) === 'number' && !isNaN(arg);
+ }
+ },
+ finite: {
+ check: function (arg) {
+ return typeof (arg) === 'number' && !isNaN(arg) && isFinite(arg);
+ }
+ },
+ buffer: {
+ check: function (arg) { return Buffer.isBuffer(arg); },
+ operator: 'Buffer.isBuffer'
+ },
+ array: {
+ check: function (arg) { return Array.isArray(arg); },
+ operator: 'Array.isArray'
+ },
+ stream: {
+ check: function (arg) { return arg instanceof Stream; },
+ operator: 'instanceof',
+ actual: _getClass
+ },
+ date: {
+ check: function (arg) { return arg instanceof Date; },
+ operator: 'instanceof',
+ actual: _getClass
+ },
+ regexp: {
+ check: function (arg) { return arg instanceof RegExp; },
+ operator: 'instanceof',
+ actual: _getClass
+ },
+ uuid: {
+ check: function (arg) {
+ return typeof (arg) === 'string' && UUID_REGEXP.test(arg);
+ },
+ operator: 'isUUID'
+ }
+};
+
+function _setExports(ndebug) {
+ var keys = Object.keys(types);
+ var out;
+
+ /* re-export standard assert */
+ if (process.env.NODE_NDEBUG) {
+ out = noop;
+ } else {
+ out = function (arg, msg) {
+ if (!arg) {
+ _toss(msg, 'true', arg);
+ }
+ };
+ }
+
+ /* standard checks */
+ keys.forEach(function (k) {
+ if (ndebug) {
+ out[k] = noop;
+ return;
+ }
+ var type = types[k];
+ out[k] = function (arg, msg) {
+ if (!type.check(arg)) {
+ _toss(msg, k, type.operator, arg, type.actual);
+ }
+ };
+ });
+
+ /* optional checks */
+ keys.forEach(function (k) {
+ var name = 'optional' + _capitalize(k);
+ if (ndebug) {
+ out[name] = noop;
+ return;
+ }
+ var type = types[k];
+ out[name] = function (arg, msg) {
+ if (arg === undefined || arg === null) {
+ return;
+ }
+ if (!type.check(arg)) {
+ _toss(msg, k, type.operator, arg, type.actual);
+ }
+ };
+ });
+
+ /* arrayOf checks */
+ keys.forEach(function (k) {
+ var name = 'arrayOf' + _capitalize(k);
+ if (ndebug) {
+ out[name] = noop;
+ return;
+ }
+ var type = types[k];
+ var expected = '[' + k + ']';
+ out[name] = function (arg, msg) {
+ if (!Array.isArray(arg)) {
+ _toss(msg, expected, type.operator, arg, type.actual);
+ }
+ var i;
+ for (i = 0; i < arg.length; i++) {
+ if (!type.check(arg[i])) {
+ _toss(msg, expected, type.operator, arg, type.actual);
+ }
+ }
+ };
+ });
+
+ /* optionalArrayOf checks */
+ keys.forEach(function (k) {
+ var name = 'optionalArrayOf' + _capitalize(k);
+ if (ndebug) {
+ out[name] = noop;
+ return;
+ }
+ var type = types[k];
+ var expected = '[' + k + ']';
+ out[name] = function (arg, msg) {
+ if (arg === undefined || arg === null) {
+ return;
+ }
+ if (!Array.isArray(arg)) {
+ _toss(msg, expected, type.operator, arg, type.actual);
+ }
+ var i;
+ for (i = 0; i < arg.length; i++) {
+ if (!type.check(arg[i])) {
+ _toss(msg, expected, type.operator, arg, type.actual);
+ }
+ }
+ };
+ });
+
+ /* re-export built-in assertions */
+ Object.keys(assert).forEach(function (k) {
+ if (k === 'AssertionError') {
+ out[k] = assert[k];
+ return;
+ }
+ if (ndebug) {
+ out[k] = noop;
+ return;
+ }
+ out[k] = assert[k];
+ });
+
+ /* export ourselves (for unit tests _only_) */
+ out._setExports = _setExports;
+
+ return out;
+}
+
+module.exports = _setExports(process.env.NODE_NDEBUG);
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/package.json
new file mode 100644
index 00000000000000..6e7ed68b9e4c03
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/assert-plus/package.json
@@ -0,0 +1,83 @@
+{
+ "author": {
+ "name": "Mark Cavage",
+ "email": "mcavage@gmail.com"
+ },
+ "name": "assert-plus",
+ "description": "Extra assertions on top of node's assert module",
+ "version": "1.0.0",
+ "license": "MIT",
+ "main": "./assert.js",
+ "devDependencies": {
+ "tape": "4.2.2",
+ "faucet": "0.0.1"
+ },
+ "optionalDependencies": {},
+ "scripts": {
+ "test": "tape tests/*.js | ./node_modules/.bin/faucet"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/mcavage/node-assert-plus.git"
+ },
+ "engines": {
+ "node": ">=0.8"
+ },
+ "contributors": [
+ {
+ "name": "Dave Eddy",
+ "email": "dave@daveeddy.com"
+ },
+ {
+ "name": "Fred Kuo",
+ "email": "fred.kuo@joyent.com"
+ },
+ {
+ "name": "Lars-Magnus Skog",
+ "email": "ralphtheninja@riseup.net"
+ },
+ {
+ "name": "Mark Cavage",
+ "email": "mcavage@gmail.com"
+ },
+ {
+ "name": "Patrick Mooney",
+ "email": "pmooney@pfmooney.com"
+ },
+ {
+ "name": "Rob Gulewich",
+ "email": "robert.gulewich@joyent.com"
+ }
+ ],
+ "bugs": {
+ "url": "https://github.com/mcavage/node-assert-plus/issues"
+ },
+ "homepage": "https://github.com/mcavage/node-assert-plus#readme",
+ "dependencies": {},
+ "_id": "assert-plus@1.0.0",
+ "_shasum": "f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525",
+ "_resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
+ "_from": "assert-plus@>=1.0.0 <2.0.0",
+ "_npmVersion": "3.3.9",
+ "_nodeVersion": "0.10.40",
+ "_npmUser": {
+ "name": "pfmooney",
+ "email": "patrick.f.mooney@gmail.com"
+ },
+ "maintainers": [
+ {
+ "name": "mcavage",
+ "email": "mcavage@gmail.com"
+ },
+ {
+ "name": "pfmooney",
+ "email": "patrick.f.mooney@gmail.com"
+ }
+ ],
+ "dist": {
+ "shasum": "f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525",
+ "tarball": "http://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz"
+ },
+ "directories": {},
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/lib/dashdash.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/lib/dashdash.js
index d57285aeb7765d..882bb3ff415970 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/lib/dashdash.js
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/lib/dashdash.js
@@ -139,7 +139,7 @@ function parseInteger(option, optstr, arg) {
function parsePositiveInteger(option, optstr, arg) {
assert.string(arg, 'arg');
var num = Number(arg);
- if (!/^[0-9]+$/.test(arg) || isNaN(num)) {
+ if (!/^[0-9]+$/.test(arg) || isNaN(num) || num === 0) {
throw new Error(format('arg for "%s" is not a positive integer: "%s"',
optstr, arg));
}
@@ -826,7 +826,7 @@ function bashCompletionSpecFromOptions(args) {
var longopts = [];
var optargs = [];
(args.options || []).forEach(function (o) {
- if (o.group) {
+ if (o.group !== undefined && o.group !== null) {
// Skip group headers.
return;
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/package.json
index eb2061d1c6fcc0..882ca5b416eb44 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/dashdash/package.json
@@ -1,7 +1,7 @@
{
"name": "dashdash",
"description": "A light, featureful and explicit option parsing library.",
- "version": "1.12.2",
+ "version": "1.13.1",
"author": {
"name": "Trent Mick",
"email": "trentm@gmail.com",
@@ -23,13 +23,13 @@
},
"main": "./lib/dashdash.js",
"dependencies": {
- "assert-plus": "^0.2.0"
+ "assert-plus": "^1.0.0"
},
"devDependencies": {
"nodeunit": "0.9.x"
},
"engines": {
- "node": ">=0.8"
+ "node": ">=0.10"
},
"scripts": {
"test": "nodeunit test/*.test.js"
@@ -52,17 +52,21 @@
{
"name": "Patrick Mooney",
"url": "https://github.com/pfmooney"
+ },
+ {
+ "name": "Dave Pacheco",
+ "url": "https://github.com/davepacheco"
}
],
- "gitHead": "d4248f21da0b30bd89c88ccee9bf6eac0e59f9f6",
+ "gitHead": "aabf8a7e71ce7ed3d24c3c57d64a57b78c1a8546",
"bugs": {
"url": "https://github.com/trentm/node-dashdash/issues"
},
"homepage": "https://github.com/trentm/node-dashdash",
- "_id": "dashdash@1.12.2",
- "_shasum": "1c6f70588498d047b8cd5777b32ba85a5e25be36",
- "_from": "dashdash@>=1.10.1 <2.0.0",
- "_npmVersion": "1.4.28",
+ "_id": "dashdash@1.13.1",
+ "_shasum": "3530ed38b9026be9af05c83423c9154122e3d47c",
+ "_from": "dashdash@>=1.12.0 <2.0.0",
+ "_npmVersion": "1.4.29",
"_npmUser": {
"name": "trentm",
"email": "trentm@gmail.com"
@@ -74,10 +78,14 @@
}
],
"dist": {
- "shasum": "1c6f70588498d047b8cd5777b32ba85a5e25be36",
- "tarball": "http://registry.npmjs.org/dashdash/-/dashdash-1.12.2.tgz"
+ "shasum": "3530ed38b9026be9af05c83423c9154122e3d47c",
+ "tarball": "https://registry.npmjs.org/dashdash/-/dashdash-1.13.1.tgz"
+ },
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/dashdash-1.13.1.tgz_1461355198185_0.31851457548327744"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.12.2.tgz",
+ "_resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.13.1.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/ecc-jsbn/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/ecc-jsbn/package.json
index 8c7fa540f7aec2..2bb7aa8ee81705 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/ecc-jsbn/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/ecc-jsbn/package.json
@@ -39,7 +39,7 @@
"_id": "ecc-jsbn@0.1.1",
"scripts": {},
"_shasum": "0fc73a9ed5f0d53c38193398523ef7e543777505",
- "_from": "ecc-jsbn@>=0.0.1 <1.0.0",
+ "_from": "ecc-jsbn@>=0.1.1 <0.2.0",
"_npmVersion": "2.11.2",
"_nodeVersion": "0.12.6",
"_npmUser": {
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.npmignore b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.npmignore
new file mode 100644
index 00000000000000..a4261fc06feaaf
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.npmignore
@@ -0,0 +1,8 @@
+.gitmodules
+deps
+docs
+Makefile
+node_modules
+test
+tools
+coverage
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.travis.yml b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.travis.yml
new file mode 100644
index 00000000000000..d8b5833a71b22c
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/.travis.yml
@@ -0,0 +1,9 @@
+language: node_js
+node_js:
+ - "5.10"
+ - "4.4"
+ - "4.1"
+ - "0.12"
+ - "0.10"
+before_install:
+ - "make check"
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/LICENSE b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/LICENSE
new file mode 100644
index 00000000000000..f6d947d2f61c41
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/LICENSE
@@ -0,0 +1,18 @@
+Copyright Joyent, Inc. All rights reserved.
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/README.md b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/README.md
new file mode 100644
index 00000000000000..6e4a50f63f7f00
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/README.md
@@ -0,0 +1,32 @@
+## getpass
+
+Get a password from the terminal. Sounds simple? Sounds like the `readline`
+module should be able to do it? NOPE.
+
+## Install and use it
+
+```bash
+npm install --save getpass
+```
+
+```javascript
+const mod_getpass = require('getpass');
+```
+
+## API
+
+### `mod_getpass.getPass([options, ]callback)`
+
+Gets a password from the terminal. If available, this uses `/dev/tty` to avoid
+interfering with any data being piped in or out of stdio.
+
+This function prints a prompt (by default `Password:`) and then accepts input
+without echoing.
+
+Parameters:
+
+ * `options`, an Object, with properties:
+ * `prompt`, an optional String
+ * `callback`, a `Func(error, password)`, with arguments:
+ * `error`, either `null` (no error) or an `Error` instance
+ * `password`, a String
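+
+As a rough usage sketch (based on the API described above, not part of the
+original README), prompting for a passphrase might look like this:
+
+```javascript
+const mod_getpass = require('getpass');
+
+// Prints "Passphrase:" and reads input from /dev/tty without echoing it.
+mod_getpass.getPass({ prompt: 'Passphrase' }, function (err, password) {
+    if (err) {
+        console.error('could not read passphrase: ' + err.message);
+        return;
+    }
+    console.log('read a passphrase of length ' + password.length);
+});
+```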
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/lib/index.js b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/lib/index.js
new file mode 100644
index 00000000000000..55a7718c0f7f72
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/lib/index.js
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2016, Joyent, Inc. All rights reserved.
+ * Author: Alex Wilson <alex.wilson@joyent.com>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to
+ * deal in the Software without restriction, including without limitation the
+ * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+ * sell copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+*/
+
+module.exports = {
+ getPass: getPass
+};
+
+const mod_tty = require('tty');
+const mod_fs = require('fs');
+const mod_assert = require('assert-plus');
+
+var BACKSPACE = String.fromCharCode(127);
+var CTRLC = '\u0003';
+var CTRLD = '\u0004';
+
+function getPass(opts, cb) {
+ if (typeof (opts) === 'function' && cb === undefined) {
+ cb = opts;
+ opts = {};
+ }
+ mod_assert.object(opts, 'options');
+ mod_assert.func(cb, 'callback');
+
+ mod_assert.optionalString(opts.prompt, 'options.prompt');
+ if (opts.prompt === undefined)
+ opts.prompt = 'Password';
+
+ openTTY(function (err, rfd, wfd, rtty, wtty) {
+ if (err) {
+ cb(err);
+ return;
+ }
+
+ wtty.write(opts.prompt + ':');
+ rtty.resume();
+ rtty.setRawMode(true);
+ rtty.resume();
+ rtty.setEncoding('utf8');
+
+ var pw = '';
+ rtty.on('data', onData);
+
+ function onData(data) {
+ var str = data.toString('utf8');
+ for (var i = 0; i < str.length; ++i) {
+ var ch = str[i];
+ switch (ch) {
+ case '\r':
+ case '\n':
+ case CTRLD:
+ cleanup();
+ cb(null, pw);
+ return;
+ case CTRLC:
+ cleanup();
+ cb(new Error('Aborted'));
+ return;
+ case BACKSPACE:
+ pw = pw.slice(0, pw.length - 1);
+ break;
+ default:
+ pw += ch;
+ break;
+ }
+ }
+ }
+
+ function cleanup() {
+ wtty.write('\r\n');
+ rtty.setRawMode(false);
+ rtty.pause();
+ rtty.removeListener('data', onData);
+ if (wfd !== undefined && wfd !== rfd) {
+ wtty.end();
+ mod_fs.closeSync(wfd);
+ }
+ if (rfd !== undefined) {
+ rtty.end();
+ mod_fs.closeSync(rfd);
+ }
+ }
+ });
+}
+
+function openTTY(cb) {
+ mod_fs.open('/dev/tty', 'r+', function (err, rttyfd) {
+ if ((err && (err.code === 'ENOENT' || err.code === 'EACCES')) ||
+ (process.version.match(/^v0[.][0-8][.]/))) {
+ cb(null, undefined, undefined, process.stdin,
+ process.stdout);
+ return;
+ }
+ var rtty = new mod_tty.ReadStream(rttyfd);
+ mod_fs.open('/dev/tty', 'w+', function (err3, wttyfd) {
+ var wtty = new mod_tty.WriteStream(wttyfd);
+ if (err3) {
+ cb(err3);
+ return;
+ }
+ cb(null, rttyfd, wttyfd, rtty, wtty);
+ });
+ });
+}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/package.json
new file mode 100644
index 00000000000000..93a8042d2dfa13
--- /dev/null
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/getpass/package.json
@@ -0,0 +1,57 @@
+{
+ "name": "getpass",
+ "version": "0.1.6",
+ "description": "getpass for node.js",
+ "main": "lib/index.js",
+ "dependencies": {
+ "assert-plus": "^1.0.0"
+ },
+ "devDependencies": {
+ "json": "^9.0.3",
+ "pty.js": "^0.3.0",
+ "tape": "^4.4.0"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/arekinath/node-getpass.git"
+ },
+ "scripts": {
+ "test": "tape test/*.test.js"
+ },
+ "author": {
+ "name": "Alex Wilson",
+ "email": "alex.wilson@joyent.com"
+ },
+ "license": "MIT",
+ "gitHead": "e7fdf43ad60aa520f894d41856852aa320f36646",
+ "bugs": {
+ "url": "https://github.com/arekinath/node-getpass/issues"
+ },
+ "homepage": "https://github.com/arekinath/node-getpass#readme",
+ "_id": "getpass@0.1.6",
+ "_shasum": "283ffd9fc1256840875311c1b60e8c40187110e6",
+ "_from": "getpass@>=0.1.1 <0.2.0",
+ "_npmVersion": "2.14.9",
+ "_nodeVersion": "0.12.9",
+ "_npmUser": {
+ "name": "arekinath",
+ "email": "alex@cooperi.net"
+ },
+ "dist": {
+ "shasum": "283ffd9fc1256840875311c1b60e8c40187110e6",
+ "tarball": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "arekinath",
+ "email": "alex@cooperi.net"
+ }
+ ],
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/getpass-0.1.6.tgz_1461907090215_0.6450737570412457"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.6.tgz",
+ "readme": "ERROR: No README data found!"
+}
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/tweetnacl/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/tweetnacl/package.json
index 68282397db2200..bc307dc741015d 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/tweetnacl/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/node_modules/tweetnacl/package.json
@@ -72,7 +72,7 @@
"gitHead": "2bb422cb707fba4a5ec9654688564a4fb861b068",
"_id": "tweetnacl@0.13.3",
"_shasum": "d628b56f3bcc3d5ae74ba9d4c1a704def5ab4b56",
- "_from": "tweetnacl@>=0.13.0 <1.0.0",
+ "_from": "tweetnacl@>=0.13.0 <0.14.0",
"_npmVersion": "2.14.7",
"_nodeVersion": "4.2.3",
"_npmUser": {
@@ -81,7 +81,7 @@
},
"dist": {
"shasum": "d628b56f3bcc3d5ae74ba9d4c1a704def5ab4b56",
- "tarball": "http://registry.npmjs.org/tweetnacl/-/tweetnacl-0.13.3.tgz"
+ "tarball": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.13.3.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/package.json b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/package.json
index aa1366619749cb..f46a6bd9f2254d 100644
--- a/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/package.json
+++ b/deps/npm/node_modules/request/node_modules/http-signature/node_modules/sshpk/package.json
@@ -1,6 +1,6 @@
{
"name": "sshpk",
- "version": "1.7.3",
+ "version": "1.8.3",
"description": "A library for finding and using SSH public keys",
"main": "lib/index.js",
"scripts": {
@@ -32,7 +32,7 @@
"url": "https://github.com/arekinath/node-sshpk/issues"
},
"engines": {
- "node": ">=0.8.0"
+ "node": ">=0.10.0"
},
"directories": {
"bin": "./bin",
@@ -41,25 +41,26 @@
},
"homepage": "https://github.com/arekinath/node-sshpk#readme",
"dependencies": {
- "asn1": ">=0.2.3 <0.3.0",
- "assert-plus": ">=0.2.0 <0.3.0",
- "dashdash": ">=1.10.1 <2.0.0",
- "jsbn": ">=0.1.0 <0.2.0",
- "tweetnacl": ">=0.13.0 <1.0.0",
- "jodid25519": ">=1.0.0 <2.0.0",
- "ecc-jsbn": ">=0.0.1 <1.0.0"
+ "asn1": "~0.2.3",
+ "assert-plus": "^1.0.0",
+ "dashdash": "^1.12.0",
+ "getpass": "^0.1.1",
+ "jsbn": "~0.1.0",
+ "tweetnacl": "~0.13.0",
+ "jodid25519": "^1.0.0",
+ "ecc-jsbn": "~0.1.1"
},
"optionalDependencies": {
- "jsbn": ">=0.1.0 <0.2.0",
- "tweetnacl": ">=0.13.0 <1.0.0",
- "jodid25519": ">=1.0.0 <2.0.0",
- "ecc-jsbn": ">=0.0.1 <1.0.0"
+ "jsbn": "~0.1.0",
+ "tweetnacl": "~0.13.0",
+ "jodid25519": "^1.0.0",
+ "ecc-jsbn": "~0.1.1"
},
"devDependencies": {
- "tape": ">=3.5.0 <4.0.0",
- "benchmark": ">=1.0.0 <2.0.0",
- "sinon": ">=1.17.2 <2.0.0",
- "temp": "0.8.2"
+ "tape": "^3.5.0",
+ "benchmark": "^1.0.0",
+ "sinon": "^1.17.2",
+ "temp": "^0.8.2"
},
"man": [
"/Users/alex.wilson/dev/sshpk/man/man1/sshpk-conv.1",
@@ -71,19 +72,19 @@
"sshpk-sign": "bin/sshpk-sign",
"sshpk-verify": "bin/sshpk-verify"
},
- "gitHead": "3d98bfc22bb1c09f0747244acbb408f3ca9448b5",
- "_id": "sshpk@1.7.3",
- "_shasum": "caa8ef95e30765d856698b7025f9f211ab65962f",
+ "gitHead": "82d39066b2df4e8284350ff5ebb08c5b95c74652",
+ "_id": "sshpk@1.8.3",
+ "_shasum": "890cc9d614dc5292e5cb1a543b03c9abaa5c374e",
"_from": "sshpk@>=1.7.0 <2.0.0",
- "_npmVersion": "2.14.9",
- "_nodeVersion": "0.12.9",
+ "_npmVersion": "2.15.1",
+ "_nodeVersion": "0.12.13",
"_npmUser": {
"name": "arekinath",
"email": "alex@cooperi.net"
},
"dist": {
- "shasum": "caa8ef95e30765d856698b7025f9f211ab65962f",
- "tarball": "http://registry.npmjs.org/sshpk/-/sshpk-1.7.3.tgz"
+ "shasum": "890cc9d614dc5292e5cb1a543b03c9abaa5c374e",
+ "tarball": "https://registry.npmjs.org/sshpk/-/sshpk-1.8.3.tgz"
},
"maintainers": [
{
@@ -91,6 +92,10 @@
"email": "alex@cooperi.net"
}
],
- "_resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.7.3.tgz",
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/sshpk-1.8.3.tgz_1461968607532_0.32797130732797086"
+ },
+ "_resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.8.3.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/json-stringify-safe/package.json b/deps/npm/node_modules/request/node_modules/json-stringify-safe/package.json
index ccd55b008dfa64..1ba97c94186a52 100644
--- a/deps/npm/node_modules/request/node_modules/json-stringify-safe/package.json
+++ b/deps/npm/node_modules/request/node_modules/json-stringify-safe/package.json
@@ -50,7 +50,7 @@
},
"dist": {
"shasum": "1296a2d58fd45f19a0f6ce01d65701e2c735b6eb",
- "tarball": "http://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz"
+ "tarball": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz"
},
"maintainers": [
{
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md b/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
index 61b54b4201e30b..63bd4ea0b40ce4 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/HISTORY.md
@@ -1,3 +1,17 @@
+2.1.11 / 2016-05-01
+===================
+
+ * deps: mime-db@~1.23.0
+ - Add new mime types
+
+2.1.10 / 2016-02-15
+===================
+
+ * deps: mime-db@~1.22.0
+ - Add new mime types
+ - Fix extension of `application/dash+xml`
+ - Update primary extension for `audio/mp4`
+
2.1.9 / 2016-01-06
==================
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/README.md b/deps/npm/node_modules/request/node_modules/mime-types/README.md
index e26295d0478b7e..e77d615d3e6de0 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/README.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/README.md
@@ -94,7 +94,7 @@ A map of extensions by content-type.
[npm-image]: https://img.shields.io/npm/v/mime-types.svg
[npm-url]: https://npmjs.org/package/mime-types
[node-version-image]: https://img.shields.io/node/v/mime-types.svg
-[node-version-url]: http://nodejs.org/download/
+[node-version-url]: https://nodejs.org/en/download/
[travis-image]: https://img.shields.io/travis/jshttp/mime-types/master.svg
[travis-url]: https://travis-ci.org/jshttp/mime-types
[coveralls-image]: https://img.shields.io/coveralls/jshttp/mime-types/master.svg
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
index 41a667af5cd6e1..d6705ac86d114f 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/HISTORY.md
@@ -1,3 +1,38 @@
+1.23.0 / 2016-05-01
+===================
+
+ * Add `application/efi`
+ * Add `application/vnd.3gpp.sms+xml`
+ * Add `application/vnd.3lightssoftware.imagescal`
+ * Add `application/vnd.coreos.ignition+json`
+ * Add `application/vnd.desmume.movie`
+ * Add `application/vnd.onepager`
+ * Add `application/vnd.vel+json`
+ * Add `text/prs.prop.logic`
+ * Add `video/encaprtp`
+ * Add `video/h265`
+ * Add `video/iso.segment`
+ * Add `video/raptorfec`
+ * Add `video/rtploopback`
+ * Add `video/vnd.radgamettools.bink`
+ * Add `video/vnd.radgamettools.smacker`
+ * Add `video/vp8`
+ * Add extension `.3gpp` to `audio/3gpp`
+
+1.22.0 / 2016-02-15
+===================
+
+ * Add `application/ppsp-tracker+json`
+ * Add `application/problem+json`
+ * Add `application/problem+xml`
+ * Add `application/vnd.hdt`
+ * Add `application/vnd.ms-printschematicket+xml`
+ * Add `model/vnd.rosette.annotated-data-model`
+ * Add `text/slim`
+ * Add extension `.rng` to `application/xml`
+ * Fix extension of `application/dash+xml` to be `.mpd`
+ * Update primary extension to `.m4a` for `audio/mp4`
+
1.21.0 / 2016-01-06
===================
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
index 412ba9ed65e1e0..0a5a8a7bba5574 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/db.json
@@ -217,7 +217,7 @@
},
"application/dash+xml": {
"source": "iana",
- "extensions": ["mdp"]
+ "extensions": ["mpd"]
},
"application/dashdelta": {
"source": "iana"
@@ -284,6 +284,9 @@
"source": "iana",
"compressible": false
},
+ "application/efi": {
+ "source": "iana"
+ },
"application/emergencycalldata.comment+xml": {
"source": "iana"
},
@@ -837,6 +840,17 @@
"compressible": true,
"extensions": ["ai","eps","ps"]
},
+ "application/ppsp-tracker+json": {
+ "source": "iana",
+ "compressible": true
+ },
+ "application/problem+json": {
+ "source": "iana",
+ "compressible": true
+ },
+ "application/problem+xml": {
+ "source": "iana"
+ },
"application/provenance+xml": {
"source": "iana"
},
@@ -1206,6 +1220,9 @@
"application/vnd.3gpp.sms": {
"source": "iana"
},
+ "application/vnd.3gpp.sms+xml": {
+ "source": "iana"
+ },
"application/vnd.3gpp.srvcc-ext+xml": {
"source": "iana"
},
@@ -1228,6 +1245,9 @@
"source": "iana",
"extensions": ["tcap"]
},
+ "application/vnd.3lightssoftware.imagescal": {
+ "source": "iana"
+ },
"application/vnd.3m.post-it-notes": {
"source": "iana",
"extensions": ["pwn"]
@@ -1486,6 +1506,10 @@
"source": "iana",
"extensions": ["cdbcmsg"]
},
+ "application/vnd.coreos.ignition+json": {
+ "source": "iana",
+ "compressible": true
+ },
"application/vnd.cosmocaller": {
"source": "iana",
"extensions": ["cmc"]
@@ -1589,6 +1613,9 @@
"application/vnd.desmume-movie": {
"source": "iana"
},
+ "application/vnd.desmume.movie": {
+ "source": "apache"
+ },
"application/vnd.dir-bi.plate-dl-nosuffix": {
"source": "iana"
},
@@ -2093,6 +2120,9 @@
"application/vnd.hcl-bireports": {
"source": "iana"
},
+ "application/vnd.hdt": {
+ "source": "iana"
+ },
"application/vnd.heroku+json": {
"source": "iana",
"compressible": true
@@ -2687,6 +2717,9 @@
"application/vnd.ms-printing.printticket+xml": {
"source": "apache"
},
+ "application/vnd.ms-printschematicket+xml": {
+ "source": "iana"
+ },
"application/vnd.ms-project": {
"source": "iana",
"extensions": ["mpp","mpt"]
@@ -3121,6 +3154,9 @@
"application/vnd.omaloc-supl-init": {
"source": "iana"
},
+ "application/vnd.onepager": {
+ "source": "iana"
+ },
"application/vnd.openblox.game+xml": {
"source": "iana"
},
@@ -3964,6 +4000,10 @@
"application/vnd.vectorworks": {
"source": "iana"
},
+ "application/vnd.vel+json": {
+ "source": "iana",
+ "compressible": true
+ },
"application/vnd.verimatrix.vcas": {
"source": "iana"
},
@@ -4749,7 +4789,7 @@
"application/xml": {
"source": "iana",
"compressible": true,
- "extensions": ["xml","xsl","xsd"]
+ "extensions": ["xml","xsl","xsd","rng"]
},
"application/xml-dtd": {
"source": "iana",
@@ -4809,7 +4849,9 @@
"source": "iana"
},
"audio/3gpp": {
- "source": "iana"
+ "source": "iana",
+ "compressible": false,
+ "extensions": ["3gpp"]
},
"audio/3gpp2": {
"source": "iana"
@@ -5023,7 +5065,7 @@
"audio/mp4": {
"source": "iana",
"compressible": false,
- "extensions": ["mp4a","m4a"]
+ "extensions": ["m4a","mp4a"]
},
"audio/mp4a-latm": {
"source": "iana"
@@ -5774,6 +5816,9 @@
"model/vnd.parasolid.transmit.text": {
"source": "iana"
},
+ "model/vnd.rosette.annotated-data-model": {
+ "source": "iana"
+ },
"model/vnd.valve.source.compiled-map": {
"source": "iana"
},
@@ -5971,6 +6016,9 @@
"source": "iana",
"extensions": ["dsc"]
},
+ "text/prs.prop.logic": {
+ "source": "iana"
+ },
"text/raptorfec": {
"source": "iana"
},
@@ -6003,6 +6051,9 @@
"source": "iana",
"extensions": ["sgml","sgm"]
},
+ "text/slim": {
+ "extensions": ["slim","slm"]
+ },
"text/stylus": {
"extensions": ["stylus","styl"]
},
@@ -6259,6 +6310,9 @@
"video/dv": {
"source": "apache"
},
+ "video/encaprtp": {
+ "source": "apache"
+ },
"video/h261": {
"source": "apache",
"extensions": ["h261"]
@@ -6283,6 +6337,12 @@
"video/h264-svc": {
"source": "apache"
},
+ "video/h265": {
+ "source": "apache"
+ },
+ "video/iso.segment": {
+ "source": "apache"
+ },
"video/jpeg": {
"source": "apache",
"extensions": ["jpgv"]
@@ -6346,12 +6406,18 @@
"compressible": false,
"extensions": ["qt","mov"]
},
+ "video/raptorfec": {
+ "source": "apache"
+ },
"video/raw": {
"source": "apache"
},
"video/rtp-enc-aescm128": {
"source": "apache"
},
+ "video/rtploopback": {
+ "source": "apache"
+ },
"video/rtx": {
"source": "apache"
},
@@ -6451,6 +6517,12 @@
"video/vnd.objectvideo": {
"source": "apache"
},
+ "video/vnd.radgamettools.bink": {
+ "source": "apache"
+ },
+ "video/vnd.radgamettools.smacker": {
+ "source": "apache"
+ },
"video/vnd.sealed.mpeg1": {
"source": "apache"
},
@@ -6471,6 +6543,9 @@
"source": "apache",
"extensions": ["viv"]
},
+ "video/vp8": {
+ "source": "apache"
+ },
"video/webm": {
"source": "apache",
"compressible": false,
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
index e6c973296cda56..d1af1d098dcdeb 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/node_modules/mime-db/package.json
@@ -1,7 +1,7 @@
{
"name": "mime-db",
"description": "Media Type Database",
- "version": "1.21.0",
+ "version": "1.23.0",
"contributors": [
{
"name": "Douglas Christopher Wilson",
@@ -33,15 +33,15 @@
"url": "git+https://github.com/jshttp/mime-db.git"
},
"devDependencies": {
- "bluebird": "3.1.1",
+ "bluebird": "3.3.5",
"co": "4.6.0",
"cogent": "1.0.1",
- "csv-parse": "1.0.1",
- "gnode": "0.1.1",
- "istanbul": "0.4.1",
+ "csv-parse": "1.1.0",
+ "gnode": "0.1.2",
+ "istanbul": "0.4.3",
"mocha": "1.21.5",
- "raw-body": "2.1.5",
- "stream-to-array": "2.2.0"
+ "raw-body": "2.1.6",
+ "stream-to-array": "2.3.0"
},
"files": [
"HISTORY.md",
@@ -61,34 +61,39 @@
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/",
"update": "npm run fetch && npm run build"
},
- "gitHead": "9ab92f0a912a602408a64db5741dfef6f82c597f",
+ "gitHead": "ba0d99fd05b3bfdc2ebcd78f858c25cb7db6af41",
"bugs": {
"url": "https://github.com/jshttp/mime-db/issues"
},
- "homepage": "https://github.com/jshttp/mime-db",
- "_id": "mime-db@1.21.0",
- "_shasum": "9b5239e3353cf6eb015a00d890261027c36d4bac",
- "_from": "mime-db@>=1.21.0 <1.22.0",
- "_npmVersion": "1.4.28",
+ "homepage": "https://github.com/jshttp/mime-db#readme",
+ "_id": "mime-db@1.23.0",
+ "_shasum": "a31b4070adaea27d732ea333740a64d0ec9a6659",
+ "_from": "mime-db@>=1.23.0 <1.24.0",
+ "_npmVersion": "2.15.1",
+ "_nodeVersion": "4.4.3",
"_npmUser": {
"name": "dougwilson",
"email": "doug@somethingdoug.com"
},
+ "dist": {
+ "shasum": "a31b4070adaea27d732ea333740a64d0ec9a6659",
+ "tarball": "https://registry.npmjs.org/mime-db/-/mime-db-1.23.0.tgz"
+ },
"maintainers": [
- {
- "name": "jongleberry",
- "email": "jonathanrichardong@gmail.com"
- },
{
"name": "dougwilson",
"email": "doug@somethingdoug.com"
+ },
+ {
+ "name": "jongleberry",
+ "email": "jonathanrichardong@gmail.com"
}
],
- "dist": {
- "shasum": "9b5239e3353cf6eb015a00d890261027c36d4bac",
- "tarball": "http://registry.npmjs.org/mime-db/-/mime-db-1.21.0.tgz"
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/mime-db-1.23.0.tgz_1462163798086_0.43938886746764183"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.21.0.tgz",
+ "_resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.23.0.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/mime-types/package.json b/deps/npm/node_modules/request/node_modules/mime-types/package.json
index ff5993558151da..14188b807c594e 100644
--- a/deps/npm/node_modules/request/node_modules/mime-types/package.json
+++ b/deps/npm/node_modules/request/node_modules/mime-types/package.json
@@ -1,7 +1,7 @@
{
"name": "mime-types",
"description": "The ultimate javascript content-type utility.",
- "version": "2.1.9",
+ "version": "2.1.11",
"contributors": [
{
"name": "Douglas Christopher Wilson",
@@ -28,11 +28,11 @@
"url": "git+https://github.com/jshttp/mime-types.git"
},
"dependencies": {
- "mime-db": "~1.21.0"
+ "mime-db": "~1.23.0"
},
"devDependencies": {
- "istanbul": "0.4.1",
- "mocha": "~1.21.5"
+ "istanbul": "0.4.3",
+ "mocha": "1.21.5"
},
"files": [
"HISTORY.md",
@@ -47,38 +47,43 @@
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/test.js",
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter dot test/test.js"
},
- "gitHead": "329f1c77e1a77c8fac59b15038e3808e9e314d96",
+ "gitHead": "298ffcf490a5d6e60edea7bf7a69036df04846b1",
"bugs": {
"url": "https://github.com/jshttp/mime-types/issues"
},
- "homepage": "https://github.com/jshttp/mime-types",
- "_id": "mime-types@2.1.9",
- "_shasum": "dfb396764b5fdf75be34b1f4104bc3687fb635f8",
+ "homepage": "https://github.com/jshttp/mime-types#readme",
+ "_id": "mime-types@2.1.11",
+ "_shasum": "c259c471bda808a85d6cd193b430a5fae4473b3c",
"_from": "mime-types@>=2.1.7 <2.2.0",
- "_npmVersion": "1.4.28",
+ "_npmVersion": "2.15.1",
+ "_nodeVersion": "4.4.3",
"_npmUser": {
"name": "dougwilson",
"email": "doug@somethingdoug.com"
},
+ "dist": {
+ "shasum": "c259c471bda808a85d6cd193b430a5fae4473b3c",
+ "tarball": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.11.tgz"
+ },
"maintainers": [
{
- "name": "jongleberry",
- "email": "jonathanrichardong@gmail.com"
+ "name": "dougwilson",
+ "email": "doug@somethingdoug.com"
},
{
"name": "fishrock123",
"email": "fishrock123@rocketmail.com"
},
{
- "name": "dougwilson",
- "email": "doug@somethingdoug.com"
+ "name": "jongleberry",
+ "email": "jonathanrichardong@gmail.com"
}
],
- "dist": {
- "shasum": "dfb396764b5fdf75be34b1f4104bc3687fb635f8",
- "tarball": "http://registry.npmjs.org/mime-types/-/mime-types-2.1.9.tgz"
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/mime-types-2.1.11.tgz_1462165365027_0.7217204745393246"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.9.tgz",
+ "_resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.11.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/node-uuid/package.json b/deps/npm/node_modules/request/node_modules/node-uuid/package.json
index 473b25a8ff52ae..2f578874ec7ed4 100644
--- a/deps/npm/node_modules/request/node_modules/node-uuid/package.json
+++ b/deps/npm/node_modules/request/node_modules/node-uuid/package.json
@@ -74,7 +74,7 @@
},
"dist": {
"shasum": "6da5a17668c4b3dd59623bda11cf7fa4c1f60a6f",
- "tarball": "http://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz"
+ "tarball": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz"
},
"_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.7.tgz",
"readme": "ERROR: No README data found!"
diff --git a/deps/npm/node_modules/request/node_modules/oauth-sign/package.json b/deps/npm/node_modules/request/node_modules/oauth-sign/package.json
index 70c01e2f053b3e..cf1eeb77cb5aae 100644
--- a/deps/npm/node_modules/request/node_modules/oauth-sign/package.json
+++ b/deps/npm/node_modules/request/node_modules/oauth-sign/package.json
@@ -6,12 +6,15 @@
},
"name": "oauth-sign",
"description": "OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module.",
- "version": "0.8.1",
+ "version": "0.8.2",
"license": "Apache-2.0",
"repository": {
"url": "git+https://github.com/mikeal/oauth-sign.git"
},
"main": "index.js",
+ "files": [
+ "index.js"
+ ],
"dependencies": {},
"devDependencies": {},
"optionalDependencies": {},
@@ -21,23 +24,23 @@
"scripts": {
"test": "node test.js"
},
- "gitHead": "9c7229a336c9face98b83f93b72cb7c80dbba08d",
+ "gitHead": "0b034206316132f57e26970152c2fb18e71bddd5",
"bugs": {
"url": "https://github.com/mikeal/oauth-sign/issues"
},
"homepage": "https://github.com/mikeal/oauth-sign#readme",
- "_id": "oauth-sign@0.8.1",
- "_shasum": "182439bdb91378bf7460e75c64ea43e6448def06",
- "_from": "oauth-sign@>=0.8.0 <0.9.0",
- "_npmVersion": "3.6.0",
- "_nodeVersion": "5.5.0",
+ "_id": "oauth-sign@0.8.2",
+ "_shasum": "46a6ab7f0aead8deae9ec0565780b7d4efeb9d43",
+ "_from": "oauth-sign@>=0.8.1 <0.9.0",
+ "_npmVersion": "2.15.3",
+ "_nodeVersion": "5.9.0",
"_npmUser": {
"name": "simov",
"email": "simeonvelichkov@gmail.com"
},
"dist": {
- "shasum": "182439bdb91378bf7460e75c64ea43e6448def06",
- "tarball": "http://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.1.tgz"
+ "shasum": "46a6ab7f0aead8deae9ec0565780b7d4efeb9d43",
+ "tarball": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz"
},
"maintainers": [
{
@@ -53,7 +56,11 @@
"email": "simeonvelichkov@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/oauth-sign-0.8.2.tgz_1462396399020_0.8175400267355144"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.1.tgz",
+ "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/oauth-sign/test.js b/deps/npm/node_modules/request/node_modules/oauth-sign/test.js
deleted file mode 100644
index a8847270df5e3b..00000000000000
--- a/deps/npm/node_modules/request/node_modules/oauth-sign/test.js
+++ /dev/null
@@ -1,89 +0,0 @@
-var oauth = require('./index')
- , hmacsign = oauth.hmacsign
- , assert = require('assert')
- , qs = require('querystring')
- ;
-
-// Tests from Twitter documentation https://dev.twitter.com/docs/auth/oauth
-
-var reqsign = hmacsign('POST', 'https://api.twitter.com/oauth/request_token',
- { oauth_callback: 'http://localhost:3005/the_dance/process_callback?service_provider_id=11'
- , oauth_consumer_key: 'GDdmIQH6jhtmLUypg82g'
- , oauth_nonce: 'QP70eNmVz8jvdPevU3oJD2AfF7R7odC2XJcn4XlZJqk'
- , oauth_signature_method: 'HMAC-SHA1'
- , oauth_timestamp: '1272323042'
- , oauth_version: '1.0'
- }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98")
-
-console.log(reqsign)
-console.log('8wUi7m5HFQy76nowoCThusfgB+Q=')
-assert.equal(reqsign, '8wUi7m5HFQy76nowoCThusfgB+Q=')
-
-var accsign = hmacsign('POST', 'https://api.twitter.com/oauth/access_token',
- { oauth_consumer_key: 'GDdmIQH6jhtmLUypg82g'
- , oauth_nonce: '9zWH6qe0qG7Lc1telCn7FhUbLyVdjEaL3MO5uHxn8'
- , oauth_signature_method: 'HMAC-SHA1'
- , oauth_token: '8ldIZyxQeVrFZXFOZH5tAwj6vzJYuLQpl0WUEYtWc'
- , oauth_timestamp: '1272323047'
- , oauth_verifier: 'pDNg57prOHapMbhv25RNf75lVRd6JDsni1AJJIDYoTY'
- , oauth_version: '1.0'
- }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98", "x6qpRnlEmW9JbQn4PQVVeVG8ZLPEx6A0TOebgwcuA")
-
-console.log(accsign)
-console.log('PUw/dHA4fnlJYM6RhXk5IU/0fCc=')
-assert.equal(accsign, 'PUw/dHA4fnlJYM6RhXk5IU/0fCc=')
-
-var upsign = hmacsign('POST', 'http://api.twitter.com/1/statuses/update.json',
- { oauth_consumer_key: "GDdmIQH6jhtmLUypg82g"
- , oauth_nonce: "oElnnMTQIZvqvlfXM56aBLAf5noGD0AQR3Fmi7Q6Y"
- , oauth_signature_method: "HMAC-SHA1"
- , oauth_token: "819797-Jxq8aYUDRmykzVKrgoLhXSq67TEa5ruc4GJC2rWimw"
- , oauth_timestamp: "1272325550"
- , oauth_version: "1.0"
- , status: 'setting up my twitter 私のさえずりを設定する'
- }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98", "J6zix3FfA9LofH0awS24M3HcBYXO5nI1iYe8EfBA")
-
-console.log(upsign)
-console.log('yOahq5m0YjDDjfjxHaXEsW9D+X0=')
-assert.equal(upsign, 'yOahq5m0YjDDjfjxHaXEsW9D+X0=')
-
-// handle objects in params (useful for Wordpress REST API)
-var upsign = hmacsign('POST', 'http://wordpress.com/wp-json',
- { oauth_consumer_key: "GDdmIQH6jhtmLUypg82g"
- , oauth_nonce: "oElnnMTQIZvqvlfXM56aBLAf5noGD0AQR3Fmi7Q6Y"
- , oauth_signature_method: "HMAC-SHA1"
- , oauth_token: "819797-Jxq8aYUDRmykzVKrgoLhXSq67TEa5ruc4GJC2rWimw"
- , oauth_timestamp: "1272325550"
- , oauth_version: "1.0"
- , filter: { number: "-1" }
- }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98", "J6zix3FfA9LofH0awS24M3HcBYXO5nI1iYe8EfBA")
-
-console.log(upsign)
-console.log('YrJFBdwnjuIitGpKrxLUplcuuUQ=')
-assert.equal(upsign, 'YrJFBdwnjuIitGpKrxLUplcuuUQ=')
-
-// example in rfc5849
-var params = qs.parse('b5=%3D%253D&a3=a&c%40=&a2=r%20b' + '&' + 'c2&a3=2+q')
-params.oauth_consumer_key = '9djdj82h48djs9d2'
-params.oauth_token = 'kkk9d7dh3k39sjv7'
-params.oauth_nonce = '7d8f3e4a'
-params.oauth_signature_method = 'HMAC-SHA1'
-params.oauth_timestamp = '137131201'
-
-var rfc5849sign = hmacsign('POST', 'http://example.com/request',
- params, "j49sk3j29djd", "dh893hdasih9")
-
-console.log(rfc5849sign)
-console.log('r6/TJjbCOr97/+UU0NsvSne7s5g=')
-assert.equal(rfc5849sign, 'r6/TJjbCOr97/+UU0NsvSne7s5g=')
-
-
-// PLAINTEXT
-
-var plainSign = oauth.sign('PLAINTEXT', 'GET', 'http://dummy.com', {}, 'consumer_secret', 'token_secret')
-console.log(plainSign)
-assert.equal(plainSign, 'consumer_secret&token_secret')
-
-plainSign = oauth.plaintext('consumer_secret', 'token_secret')
-console.log(plainSign)
-assert.equal(plainSign, 'consumer_secret&token_secret')
diff --git a/deps/npm/node_modules/request/node_modules/qs/CHANGELOG.md b/deps/npm/node_modules/request/node_modules/qs/CHANGELOG.md
index 2df5e9d3783b5b..5c66ea44464cb1 100644
--- a/deps/npm/node_modules/request/node_modules/qs/CHANGELOG.md
+++ b/deps/npm/node_modules/request/node_modules/qs/CHANGELOG.md
@@ -1,3 +1,8 @@
+## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=34&state=closed)
+- [New] allowDots option for `stringify` (#151)
+- [Fix] "sort" option should work at a depth of 3 or more (#151)
+- [Fix] Restore `dist` directory; will be removed in v7 (#148)
+
## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed)
- Revert ES6 requirement and restore support for node down to v0.8.
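The 6.1.0 entry above introduces an `allowDots` option for `stringify` and forwards a custom `sort` comparator into nested keys. A minimal sketch of the difference, grounded in the new test assertions further down in this patch (the `require` path is illustrative, not part of the patch):

    var qs = require('qs')  // illustrative; any qs >= 6.1.0

    // Default bracket notation for nested keys:
    qs.stringify({ a: { b: 'c' } }, { encode: false })   // 'a[b]=c'

    // With allowDots, nested keys are joined with dots instead:
    qs.stringify({ a: { b: 'c' } }, { allowDots: true })  // 'a.b=c'

    // The sort fix: the comparator is now forwarded into nested calls,
    // so keys three or more levels deep come back ordered as well.
    qs.stringify({ z: { zj: { b: 1, a: 2 } } }, {
      sort: function (x, y) { return x.localeCompare(y) },
      encode: false
    })
    // 'z[zj][a]=2&z[zj][b]=1'
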
diff --git a/deps/npm/node_modules/request/node_modules/qs/bower.json b/deps/npm/node_modules/request/node_modules/qs/bower.json
index 8b21420a643286..44f050642d73db 100644
--- a/deps/npm/node_modules/request/node_modules/qs/bower.json
+++ b/deps/npm/node_modules/request/node_modules/qs/bower.json
@@ -1,7 +1,6 @@
{
"name": "qs",
"main": "dist/qs.js",
- "version": "5.2.0",
"homepage": "https://github.com/hapijs/qs",
"authors": [
"Nathan LaFreniere "
diff --git a/deps/npm/node_modules/request/node_modules/qs/component.json b/deps/npm/node_modules/request/node_modules/qs/component.json
index fca856a631ae37..cb8d93fb1fe8f7 100644
--- a/deps/npm/node_modules/request/node_modules/qs/component.json
+++ b/deps/npm/node_modules/request/node_modules/qs/component.json
@@ -2,7 +2,7 @@
"name": "qs",
"repository": "hapijs/qs",
"description": "query-string parser / stringifier with nesting support",
- "version": "5.2.0",
+ "version": "6.1.0",
"keywords": ["querystring", "query", "parser"],
"main": "lib/index.js",
"scripts": [
diff --git a/deps/npm/node_modules/request/node_modules/qs/dist/qs.js b/deps/npm/node_modules/request/node_modules/qs/dist/qs.js
index 68433d45a8022d..bb8ea31a957830 100644
--- a/deps/npm/node_modules/request/node_modules/qs/dist/qs.js
+++ b/deps/npm/node_modules/request/node_modules/qs/dist/qs.js
@@ -198,7 +198,7 @@ var internals = {
encode: true
};
-internals.stringify = function (object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort) {
+internals.stringify = function (object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots) {
var obj = object;
if (typeof filter === 'function') {
obj = filter(prefix, obj);
@@ -243,9 +243,9 @@ internals.stringify = function (object, prefix, generateArrayPrefix, strictNullH
}
if (Array.isArray(obj)) {
- values = values.concat(internals.stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter));
+ values = values.concat(internals.stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
} else {
- values = values.concat(internals.stringify(obj[key], prefix + '[' + key + ']', generateArrayPrefix, strictNullHandling, skipNulls, encode, filter));
+ values = values.concat(internals.stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
}
}
@@ -260,6 +260,7 @@ module.exports = function (object, opts) {
var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : internals.skipNulls;
var encode = typeof options.encode === 'boolean' ? options.encode : internals.encode;
var sort = typeof options.sort === 'function' ? options.sort : null;
+ var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots;
var objKeys;
var filter;
if (typeof options.filter === 'function') {
@@ -301,7 +302,7 @@ module.exports = function (object, opts) {
continue;
}
- keys = keys.concat(internals.stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort));
+ keys = keys.concat(internals.stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
}
return keys.join(delimiter);
diff --git a/deps/npm/node_modules/request/node_modules/qs/lib/stringify.js b/deps/npm/node_modules/request/node_modules/qs/lib/stringify.js
index e7b669d3d58288..892dad45a9f30f 100755
--- a/deps/npm/node_modules/request/node_modules/qs/lib/stringify.js
+++ b/deps/npm/node_modules/request/node_modules/qs/lib/stringify.js
@@ -20,7 +20,7 @@ var internals = {
encode: true
};
-internals.stringify = function (object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort) {
+internals.stringify = function (object, prefix, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots) {
var obj = object;
if (typeof filter === 'function') {
obj = filter(prefix, obj);
@@ -65,9 +65,9 @@ internals.stringify = function (object, prefix, generateArrayPrefix, strictNullH
}
if (Array.isArray(obj)) {
- values = values.concat(internals.stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter));
+ values = values.concat(internals.stringify(obj[key], generateArrayPrefix(prefix, key), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
} else {
- values = values.concat(internals.stringify(obj[key], prefix + '[' + key + ']', generateArrayPrefix, strictNullHandling, skipNulls, encode, filter));
+ values = values.concat(internals.stringify(obj[key], prefix + (allowDots ? '.' + key : '[' + key + ']'), generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
}
}
@@ -82,6 +82,7 @@ module.exports = function (object, opts) {
var skipNulls = typeof options.skipNulls === 'boolean' ? options.skipNulls : internals.skipNulls;
var encode = typeof options.encode === 'boolean' ? options.encode : internals.encode;
var sort = typeof options.sort === 'function' ? options.sort : null;
+ var allowDots = typeof options.allowDots === 'undefined' ? false : options.allowDots;
var objKeys;
var filter;
if (typeof options.filter === 'function') {
@@ -123,7 +124,7 @@ module.exports = function (object, opts) {
continue;
}
- keys = keys.concat(internals.stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort));
+ keys = keys.concat(internals.stringify(obj[key], key, generateArrayPrefix, strictNullHandling, skipNulls, encode, filter, sort, allowDots));
}
return keys.join(delimiter);
diff --git a/deps/npm/node_modules/request/node_modules/qs/package.json b/deps/npm/node_modules/request/node_modules/qs/package.json
index 285b2b185e7b34..6b481626571e41 100644
--- a/deps/npm/node_modules/request/node_modules/qs/package.json
+++ b/deps/npm/node_modules/request/node_modules/qs/package.json
@@ -2,7 +2,7 @@
"name": "qs",
"description": "A querystring parser that supports nesting and arrays, with a depth limit",
"homepage": "https://github.com/ljharb/qs",
- "version": "6.0.2",
+ "version": "6.1.0",
"repository": {
"type": "git",
"url": "git+https://github.com/ljharb/qs.git"
@@ -43,22 +43,22 @@
"prepublish": "npm run dist"
},
"license": "BSD-3-Clause",
- "gitHead": "47dfbd6740b3cc1593847825701c8aa136f636e3",
+ "gitHead": "5bd79545edb33d6a43398fec7df9ecef2da005ea",
"bugs": {
"url": "https://github.com/ljharb/qs/issues"
},
- "_id": "qs@6.0.2",
- "_shasum": "88c68d590e8ed56c76c79f352c17b982466abfcd",
- "_from": "qs@>=6.0.2 <6.1.0",
+ "_id": "qs@6.1.0",
+ "_shasum": "ec1d1626b24278d99f0fdf4549e524e24eceeb26",
+ "_from": "qs@>=6.1.0 <6.2.0",
"_npmVersion": "3.3.12",
- "_nodeVersion": "5.4.1",
+ "_nodeVersion": "5.5.0",
"_npmUser": {
"name": "ljharb",
"email": "ljharb@gmail.com"
},
"dist": {
- "shasum": "88c68d590e8ed56c76c79f352c17b982466abfcd",
- "tarball": "http://registry.npmjs.org/qs/-/qs-6.0.2.tgz"
+ "shasum": "ec1d1626b24278d99f0fdf4549e524e24eceeb26",
+ "tarball": "http://registry.npmjs.org/qs/-/qs-6.1.0.tgz"
},
"maintainers": [
{
@@ -74,6 +74,11 @@
"email": "quitlahok@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-5-east.internal.npmjs.com",
+ "tmp": "tmp/qs-6.1.0.tgz_1454565583082_0.44599376199766994"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/qs/-/qs-6.0.2.tgz"
+ "_resolved": "https://registry.npmjs.org/qs/-/qs-6.1.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/qs/test/stringify.js b/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
index 88d22f25e932be..508c2460b69081 100755
--- a/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
+++ b/deps/npm/node_modules/request/node_modules/qs/test/stringify.js
@@ -22,6 +22,12 @@ test('stringify()', function (t) {
st.end();
});
+ t.test('stringifies a nested object with dots notation', function (st) {
+ st.equal(qs.stringify({ a: { b: 'c' } }, { allowDots: true }), 'a.b=c');
+ st.equal(qs.stringify({ a: { b: { c: { d: 'e' } } } }, { allowDots: true }), 'a.b.c.d=e');
+ st.end();
+ });
+
t.test('stringifies an array value', function (st) {
st.equal(qs.stringify({ a: ['b', 'c', 'd'] }), 'a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d');
st.end();
@@ -48,12 +54,23 @@ test('stringify()', function (t) {
st.end();
});
+ t.test('stringifies a nested array value with dots notation', function (st) {
+ st.equal(qs.stringify({ a: { b: ['c', 'd'] } }, { allowDots: true, encode: false }), 'a.b[0]=c&a.b[1]=d');
+ st.end();
+ });
+
t.test('stringifies an object inside an array', function (st) {
st.equal(qs.stringify({ a: [{ b: 'c' }] }), 'a%5B0%5D%5Bb%5D=c');
st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }), 'a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1');
st.end();
});
+ t.test('stringifies an object inside an array with dots notation', function (st) {
+ st.equal(qs.stringify({ a: [{ b: 'c' }] }, { allowDots: true, encode: false }), 'a[0].b=c');
+ st.equal(qs.stringify({ a: [{ b: { c: [1] } }] }, { allowDots: true, encode: false }), 'a[0].b.c[0]=1');
+ st.end();
+ });
+
t.test('does not omit object keys when indices = false', function (st) {
st.equal(qs.stringify({ a: [{ b: 'c' }] }, { indices: false }), 'a%5Bb%5D=c');
st.end();
@@ -232,4 +249,11 @@ test('stringify()', function (t) {
st.equal(qs.stringify({ a: 'c', z: { j: 'a', i: 'b' }, b: 'f' }, { sort: sort }), 'a=c&b=f&z%5Bi%5D=b&z%5Bj%5D=a');
st.end();
});
+
+ t.test('can sort the keys at depth 3 or more too', function (st) {
+ var sort = function (a, b) { return a.localeCompare(b); };
+ st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', zia: 'zia'} }, b: 'b' }, { sort: sort, encode: false }), 'a=a&b=b&z[zi][zia]=zia&z[zi][zib]=zib&z[zj][zja]=zja&z[zj][zjb]=zjb');
+ st.equal(qs.stringify({ a: 'a', z: { zj: {zjb: 'zjb', zja: 'zja'}, zi: {zib: 'zib', zia: 'zia'} }, b: 'b' }, { sort: null, encode: false }), 'a=a&z[zj][zjb]=zjb&z[zj][zja]=zja&z[zi][zib]=zib&z[zi][zia]=zia&b=b');
+ st.end();
+ });
});
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
index 0afe4a2a960c58..12da297ac4d20f 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/lib/cookie.js
@@ -58,11 +58,11 @@ var CONTROL_CHARS = /[\x00-\x1F]/;
// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60)
// '=' and ';' are attribute/values separators
// (see: https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L64)
-var COOKIE_PAIR = /^(([^=;]+))\s*=\s*(("?)[^\n\r\0]*\3)/
+var COOKIE_PAIR = /^(([^=;]+))\s*=\s*([^\n\r\0]*)/;
// Used to parse non-RFC-compliant cookies like '=abc' when given the `loose`
// option in Cookie.parse:
-var LOOSE_COOKIE_PAIR = /^((?:=)?([^=;]*)\s*=\s*)?(("?)[^\n\r\0]*\3)/;
+var LOOSE_COOKIE_PAIR = /^((?:=)?([^=;]*)\s*=\s*)?([^\n\r\0]*)/;
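To see what the relaxed COOKIE_PAIR pattern captures, here is a plain regex exercise against the expression introduced above; this is not the tough-cookie public API, and the sample cookie strings are made up:

    var COOKIE_PAIR = /^(([^=;]+))\s*=\s*([^\n\r\0]*)/

    COOKIE_PAIR.exec('key=value; Path=/')
    // [ 'key=value; Path=/', 'key', 'key', 'value; Path=/' ]
    // The value group now runs to the first control character; attribute
    // splitting on ';' is handled separately by the parser.

    COOKIE_PAIR.exec('key="spaced value"')[3]
    // '"spaced value"' -- surrounding quotes are captured verbatim rather
    // than being matched by the regex itself.
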
// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"'
// Note ';' is \x3B
diff --git a/deps/npm/node_modules/request/node_modules/tough-cookie/package.json b/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
index 5fd5510a12ac53..cc1aebfc8efa13 100644
--- a/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
+++ b/deps/npm/node_modules/request/node_modules/tough-cookie/package.json
@@ -36,7 +36,7 @@
"RFC6265",
"RFC2965"
],
- "version": "2.2.1",
+ "version": "2.2.2",
"homepage": "https://github.com/SalesforceEng/tough-cookie",
"repository": {
"type": "git",
@@ -60,19 +60,19 @@
"async": "^1.4.2",
"vows": "^0.8.1"
},
- "gitHead": "f1055655ea56c85bd384aaf7d5b740b916700b6f",
- "_id": "tough-cookie@2.2.1",
- "_shasum": "3b0516b799e70e8164436a1446e7e5877fda118e",
+ "gitHead": "cc46628c4d7d2e8c372ecba29293ca8a207ec192",
+ "_id": "tough-cookie@2.2.2",
+ "_shasum": "c83a1830f4e5ef0b93ef2a3488e724f8de016ac7",
"_from": "tough-cookie@>=2.2.0 <2.3.0",
- "_npmVersion": "2.11.2",
- "_nodeVersion": "0.12.5",
+ "_npmVersion": "3.3.12",
+ "_nodeVersion": "5.1.1",
"_npmUser": {
"name": "jstash",
"email": "jstash@gmail.com"
},
"dist": {
- "shasum": "3b0516b799e70e8164436a1446e7e5877fda118e",
- "tarball": "http://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.1.tgz"
+ "shasum": "c83a1830f4e5ef0b93ef2a3488e724f8de016ac7",
+ "tarball": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.2.tgz"
},
"maintainers": [
{
@@ -84,7 +84,11 @@
"email": "services@goinstant.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-13-west.internal.npmjs.com",
+ "tmp": "tmp/tough-cookie-2.2.2.tgz_1457564639182_0.5129188685677946"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.1.tgz",
+ "_resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.2.2.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json b/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
index 4ea6ea5618acfa..6677690f9b3648 100644
--- a/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
+++ b/deps/npm/node_modules/request/node_modules/tunnel-agent/package.json
@@ -7,35 +7,38 @@
"name": "tunnel-agent",
"license": "Apache-2.0",
"description": "HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module.",
- "version": "0.4.2",
+ "version": "0.4.3",
"repository": {
"url": "git+https://github.com/mikeal/tunnel-agent.git"
},
"main": "index.js",
+ "files": [
+ "index.js"
+ ],
"dependencies": {},
"devDependencies": {},
"optionalDependencies": {},
"engines": {
"node": "*"
},
- "gitHead": "449634d1054949f9f145f4925985a8dea0f46b0f",
+ "gitHead": "e72d830f5ed388a2a71d37ce062c38e3fb34bdde",
"bugs": {
"url": "https://github.com/mikeal/tunnel-agent/issues"
},
"homepage": "https://github.com/mikeal/tunnel-agent#readme",
- "_id": "tunnel-agent@0.4.2",
+ "_id": "tunnel-agent@0.4.3",
"scripts": {},
- "_shasum": "1104e3f36ac87125c287270067d582d18133bfee",
+ "_shasum": "6373db76909fe570e08d73583365ed828a74eeeb",
"_from": "tunnel-agent@>=0.4.1 <0.5.0",
- "_npmVersion": "3.3.12",
- "_nodeVersion": "5.0.0",
+ "_npmVersion": "2.15.3",
+ "_nodeVersion": "5.9.0",
"_npmUser": {
"name": "simov",
"email": "simeonvelichkov@gmail.com"
},
"dist": {
- "shasum": "1104e3f36ac87125c287270067d582d18133bfee",
- "tarball": "http://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.2.tgz"
+ "shasum": "6373db76909fe570e08d73583365ed828a74eeeb",
+ "tarball": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz"
},
"maintainers": [
{
@@ -55,7 +58,11 @@
"email": "simeonvelichkov@gmail.com"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-16-east.internal.npmjs.com",
+ "tmp": "tmp/tunnel-agent-0.4.3.tgz_1462396470295_0.23639482469297945"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.2.tgz",
+ "_resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.4.3.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/package.json b/deps/npm/node_modules/request/package.json
index f365be73cccad4..70c7f76dc77429 100644
--- a/deps/npm/node_modules/request/package.json
+++ b/deps/npm/node_modules/request/package.json
@@ -7,7 +7,7 @@
"util",
"utility"
],
- "version": "2.69.0",
+ "version": "2.72.0",
"author": {
"name": "Mikeal Rogers",
"email": "mikeal.rogers@gmail.com"
@@ -27,22 +27,22 @@
"dependencies": {
"aws-sign2": "~0.6.0",
"aws4": "^1.2.1",
- "bl": "~1.0.0",
+ "bl": "~1.1.2",
"caseless": "~0.11.0",
"combined-stream": "~1.0.5",
"extend": "~3.0.0",
"forever-agent": "~0.6.1",
"form-data": "~1.0.0-rc3",
"har-validator": "~2.0.6",
- "hawk": "~3.1.0",
+ "hawk": "~3.1.3",
"http-signature": "~1.1.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.7",
"node-uuid": "~1.4.7",
- "oauth-sign": "~0.8.0",
- "qs": "~6.0.2",
+ "oauth-sign": "~0.8.1",
+ "qs": "~6.1.0",
"stringstream": "~0.0.4",
"tough-cookie": "~2.2.0",
"tunnel-agent": "~0.4.1"
@@ -55,36 +55,41 @@
"lint": "eslint lib/ *.js tests/ && echo Lint passed."
},
"devDependencies": {
- "bluebird": "^3.0.2",
- "browserify": "^13.0.0",
- "browserify-istanbul": "^0.1.5",
+ "bluebird": "^3.2.1",
+ "browserify": "^12.0.2",
+ "browserify-istanbul": "^2.0.0",
"buffer-equal": "^1.0.0",
"codecov.io": "^0.1.6",
"coveralls": "^2.11.4",
- "eslint": "1.10.3",
+ "eslint": "^2.5.3",
"function-bind": "^1.0.2",
"istanbul": "^0.4.0",
"karma": "^0.13.10",
- "karma-browserify": "^4.4.0",
+ "karma-browserify": "^5.0.1",
"karma-cli": "^0.1.1",
"karma-coverage": "^0.5.3",
- "karma-phantomjs-launcher": "^0.1.4",
+ "karma-phantomjs-launcher": "^1.0.0",
"karma-tap": "^1.0.3",
+ "phantomjs-prebuilt": "^2.1.3",
"rimraf": "^2.2.8",
"server-destroy": "^1.0.1",
"tape": "^4.2.0",
"taper": "^0.4.0"
},
- "gitHead": "1c2fb40c74efb4f706f350a78dbd5e58fe913af3",
+ "gitHead": "6dcac13642955577592fdafb5ff3cdc8a6ff1b1b",
"homepage": "https://github.com/request/request#readme",
- "_id": "request@2.69.0",
- "_shasum": "cf91d2e000752b1217155c005241911991a2346a",
- "_from": "request@2.69.0",
- "_npmVersion": "2.14.4",
- "_nodeVersion": "4.1.2",
+ "_id": "request@2.72.0",
+ "_shasum": "0ce3a179512620b10441f14c82e21c12c0ddb4e1",
+ "_from": "request@2.72.0",
+ "_npmVersion": "3.8.5",
+ "_nodeVersion": "5.9.0",
"_npmUser": {
- "name": "mikeal",
- "email": "mikeal.rogers@gmail.com"
+ "name": "simov",
+ "email": "simeonvelichkov@gmail.com"
+ },
+ "dist": {
+ "shasum": "0ce3a179512620b10441f14c82e21c12c0ddb4e1",
+ "tarball": "https://registry.npmjs.org/request/-/request-2.72.0.tgz"
},
"maintainers": [
{
@@ -104,10 +109,11 @@
"email": "simeonvelichkov@gmail.com"
}
],
- "dist": {
- "shasum": "cf91d2e000752b1217155c005241911991a2346a",
- "tarball": "http://registry.npmjs.org/request/-/request-2.69.0.tgz"
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/request-2.72.0.tgz_1460901215210_0.9173020373564214"
},
"directories": {},
- "_resolved": "https://registry.npmjs.org/request/-/request-2.69.0.tgz"
+ "_resolved": "https://registry.npmjs.org/request/-/request-2.72.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/request/request.js b/deps/npm/node_modules/request/request.js
index 441db80f5a009c..ac2cd8098f1045 100644
--- a/deps/npm/node_modules/request/request.js
+++ b/deps/npm/node_modules/request/request.js
@@ -16,6 +16,7 @@ var http = require('http')
, ForeverAgent = require('forever-agent')
, FormData = require('form-data')
, extend = require('extend')
+ , isstream = require('isstream')
, isTypedArray = require('is-typedarray').strict
, helpers = require('./lib/helpers')
, cookies = require('./lib/cookies')
@@ -398,7 +399,7 @@ Request.prototype.init = function (options) {
}
if (self.gzip && !self.hasHeader('accept-encoding')) {
- self.setHeader('accept-encoding', 'gzip')
+ self.setHeader('accept-encoding', 'gzip, deflate')
}
if (self.uri.auth && !self.hasHeader('authorization')) {
@@ -452,7 +453,7 @@ Request.prototype.init = function (options) {
}
}
}
- if (self.body) {
+ if (self.body && !isstream(self.body)) {
setContentLength()
}
@@ -552,15 +553,19 @@ Request.prototype.init = function (options) {
self._multipart.body.pipe(self)
}
if (self.body) {
- setContentLength()
- if (Array.isArray(self.body)) {
- self.body.forEach(function (part) {
- self.write(part)
- })
+ if (isstream(self.body)) {
+ self.body.pipe(self)
} else {
- self.write(self.body)
+ setContentLength()
+ if (Array.isArray(self.body)) {
+ self.body.forEach(function (part) {
+ self.write(part)
+ })
+ } else {
+ self.write(self.body)
+ }
+ self.end()
}
- self.end()
} else if (self.requestBodyStream) {
console.warn('options.requestBodyStream is deprecated, please pass the request object to stream.pipe.')
self.requestBodyStream.pipe(self)
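With the hunk above, `request` accepts a readable stream directly as `body`: `setContentLength()` is skipped for stream bodies and the stream is piped into the outgoing request instead of being written in one shot. A hedged sketch of what that enables (endpoint and file name are placeholders, not part of this patch):

    var fs = require('fs')
    var request = require('request')

    request({
      method: 'POST',
      url: 'http://localhost:8080/upload',     // placeholder endpoint
      body: fs.createReadStream('upload.bin')  // placeholder file; piped, not buffered
    }, function (err, res) {
      if (err) throw err
      // No Content-Length is computed for the stream body, so the upload
      // goes out with chunked transfer encoding.
      console.log('status:', res.statusCode)
    })
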
@@ -744,7 +749,12 @@ Request.prototype.start = function () {
debug('make request', self.uri.href)
- self.req = self.httpModule.request(reqOptions)
+ try {
+ self.req = self.httpModule.request(reqOptions)
+ } catch (err) {
+ self.emit('error', err)
+ return
+ }
if (self.timing) {
self.startTime = new Date().getTime()
@@ -910,14 +920,29 @@ Request.prototype.onRequestResponse = function (response) {
self._ended = true
})
+ var noBody = function (code) {
+ return (
+ self.method === 'HEAD'
+ // Informational
+ || (code >= 100 && code < 200)
+ // No Content
+ || code === 204
+ // Not Modified
+ || code === 304
+ )
+ }
+
var responseContent
- if (self.gzip) {
+ if (self.gzip && !noBody(response.statusCode)) {
var contentEncoding = response.headers['content-encoding'] || 'identity'
contentEncoding = contentEncoding.trim().toLowerCase()
if (contentEncoding === 'gzip') {
responseContent = zlib.createGunzip()
response.pipe(responseContent)
+ } else if (contentEncoding === 'deflate') {
+ responseContent = zlib.createInflate()
+ response.pipe(responseContent)
} else {
// Since previous versions didn't check for Content-Encoding header,
// ignore any invalid values to preserve backwards-compatibility
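Two caller-visible behaviors change in this hunk: with `gzip: true` the request now advertises `Accept-Encoding: gzip, deflate` and transparently inflates deflate-encoded responses, and no decompression stream is created for responses that cannot carry a body (HEAD requests, 1xx, 204, 304). A hedged usage sketch (the URL is a placeholder):

    var request = require('request')

    request({
      url: 'http://localhost:8080/data.json',  // placeholder endpoint
      gzip: true                               // now sends Accept-Encoding: gzip, deflate
    }, function (err, res, body) {
      if (err) throw err
      // body is already decompressed whether the server answered with gzip
      // or deflate; for 204/304 (or a HEAD request) nothing is unzipped at all.
      console.log(res.headers['content-encoding'], body.length)
    })
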
@@ -1001,6 +1026,9 @@ Request.prototype.readResponseBody = function (response) {
debug('end event', self.uri.href)
if (self._aborted) {
debug('aborted', self.uri.href)
+ // `buffer` is defined in the parent scope and used in a closure, so it exists for the life of the request.
+ // This can lead to leaky behavior if the user retains a reference to the request object.
+ buffer.destroy()
return
}
@@ -1013,6 +1041,9 @@ Request.prototype.readResponseBody = function (response) {
} else {
response.body = buffer.toString(self.encoding)
}
+ // `buffer` is defined in the parent scope and used in a closure, so it exists for the life of the Request.
+ // This can lead to leaky behavior if the user retains a reference to the request object.
+ buffer.destroy()
} else if (strings.length) {
// The UTF8 BOM [0xEF,0xBB,0xBF] is converted to [0xFE,0xFF] in the JS UTC16/UCS2 representation.
// Strip this value out when the encoding is set to 'utf8', as upstream consumers won't expect it and it breaks JSON.parse().
@@ -1157,11 +1188,15 @@ Request.prototype.json = function (val) {
self.setHeader('accept', 'application/json')
}
+ if (typeof self.jsonReplacer === 'function') {
+ self._jsonReplacer = self.jsonReplacer
+ }
+
self._json = true
if (typeof val === 'boolean') {
if (self.body !== undefined) {
if (!/^application\/x-www-form-urlencoded\b/.test(self.getHeader('content-type'))) {
- self.body = safeStringify(self.body)
+ self.body = safeStringify(self.body, self._jsonReplacer)
} else {
self.body = self._qs.rfc3986(self.body)
}
@@ -1170,7 +1205,7 @@ Request.prototype.json = function (val) {
}
}
} else {
- self.body = safeStringify(val)
+ self.body = safeStringify(val, self._jsonReplacer)
if (!self.hasHeader('content-type')) {
self.setHeader('content-type', 'application/json')
}
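The json() changes above let callers pass a `jsonReplacer` function, which is handed to the JSON serializer when the body is stringified. A small sketch, assuming `jsonReplacer` is supplied alongside the other request options as the surrounding code suggests (endpoint and payload are placeholders):

    var request = require('request')

    request({
      method: 'POST',
      url: 'http://localhost:8080/items',  // placeholder endpoint
      json: true,
      jsonReplacer: function (key, value) {
        // drop underscore-prefixed properties before sending
        return key.charAt(0) === '_' ? undefined : value
      },
      body: { id: 1, _internal: 'not sent' }
    }, function (err, res, body) {
      if (err) throw err
      console.log(body)
    })
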
@@ -1368,7 +1403,9 @@ Request.prototype.write = function () {
if (!self._started) {
self.start()
}
- return self.req.write.apply(self.req, arguments)
+ if (self.req) {
+ return self.req.write.apply(self.req, arguments)
+ }
}
Request.prototype.end = function (chunk) {
var self = this
@@ -1380,7 +1417,9 @@ Request.prototype.end = function (chunk) {
if (!self._started) {
self.start()
}
- self.req.end()
+ if (self.req) {
+ self.req.end()
+ }
}
Request.prototype.pause = function () {
var self = this
diff --git a/deps/npm/node_modules/sorted-object/LICENSE.txt b/deps/npm/node_modules/sorted-object/LICENSE.txt
index a92a60b824eb52..4a323deb518f3a 100644
--- a/deps/npm/node_modules/sorted-object/LICENSE.txt
+++ b/deps/npm/node_modules/sorted-object/LICENSE.txt
@@ -1,4 +1,4 @@
-Copyright © 2014 Domenic Denicola
+Copyright © 2014–2016 Domenic Denicola
This work is free. You can redistribute it and/or modify it under the
terms of the Do What The Fuck You Want To Public License, Version 2,
diff --git a/deps/npm/node_modules/sorted-object/lib/sorted-object.js b/deps/npm/node_modules/sorted-object/lib/sorted-object.js
index 26ebd500a35710..1b3fe81a6be930 100644
--- a/deps/npm/node_modules/sorted-object/lib/sorted-object.js
+++ b/deps/npm/node_modules/sorted-object/lib/sorted-object.js
@@ -1,7 +1,7 @@
"use strict";
module.exports = function (input) {
- var output = Object.create(null);
+ var output = {};
Object.keys(input).sort().forEach(function (key) {
output[key] = input[key];
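The one-line change above means sorted-object now returns a plain object literal instead of an `Object.create(null)` object, so the result carries the usual `Object.prototype` methods. Usage is unchanged; the snippet is adapted from the module's former README (removed from package.json later in this patch):

    var sortedObject = require('sorted-object')

    var drafted = { beta: 2, alpha: 1, gamma: 3 }
    var sorted = sortedObject(drafted)

    JSON.stringify(sorted)          // '{"alpha":1,"beta":2,"gamma":3}'
    sorted.hasOwnProperty('alpha')  // true -- would throw on an Object.create(null) result
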
diff --git a/deps/npm/node_modules/sorted-object/package.json b/deps/npm/node_modules/sorted-object/package.json
index 5bd814207aa352..0e4022d041168a 100644
--- a/deps/npm/node_modules/sorted-object/package.json
+++ b/deps/npm/node_modules/sorted-object/package.json
@@ -6,32 +6,58 @@
"keys",
"object"
],
- "version": "1.0.0",
+ "version": "2.0.0",
"author": {
"name": "Domenic Denicola",
- "email": "domenic@domenicdenicola.com",
- "url": "http://domenic.me/"
+ "email": "d@domenic.me",
+ "url": "https://domenic.me/"
},
"license": "WTFPL",
"repository": {
"type": "git",
- "url": "git://github.com/domenic/sorted-object.git"
- },
- "bugs": {
- "url": "http://github.com/domenic/sorted-object/issues"
+ "url": "git+https://github.com/domenic/sorted-object.git"
},
"main": "lib/sorted-object.js",
+ "files": [
+ "lib/"
+ ],
"scripts": {
"test": "tape test/tests.js",
- "lint": "jshint lib && jshint test"
+ "lint": "eslint ."
},
"devDependencies": {
- "jshint": "~2.4.3",
- "tape": "~2.4.2"
- },
- "readme": "# Get a Version of an Object with Sorted Keys\n\nAlthough objects in JavaScript are theoretically unsorted, in practice most engines use insertion order—at least, ignoring numeric keys. This manifests itself most prominently when dealing with an object's JSON serialization.\n\nSo, for example, you might be trying to serialize some object to a JSON file. But every time you write it, it ends up being output in a different order, depending on how you created it in the first place! This makes for some ugly diffs.\n\n**sorted-object** gives you the answer. Just use this package to create a version of your object with its keys sorted before serializing, and you'll get a consistent order every time.\n\n```js\nvar sortedObject = require(\"sorted-object\");\n\nvar objectToSerialize = generateStuffNondeterministically();\n\n// Before:\nfs.writeFileSync(\"dest.json\", JSON.stringify(objectToSerialize));\n\n// After:\nvar sortedVersion = sortedObject(objectToSerialize);\nfs.writeFileSync(\"dest.json\", JSON.stringify(sortedVersion));\n```\n",
- "readmeFilename": "README.md",
- "homepage": "https://github.com/domenic/sorted-object",
- "_id": "sorted-object@1.0.0",
- "_from": "sorted-object@"
+ "eslint": "^2.4.0",
+ "tape": "^4.5.1"
+ },
+ "gitHead": "3cbdde212c8ceef219fbb8fa7805bfc38b94aa90",
+ "bugs": {
+ "url": "https://github.com/domenic/sorted-object/issues"
+ },
+ "homepage": "https://github.com/domenic/sorted-object#readme",
+ "_id": "sorted-object@2.0.0",
+ "_shasum": "1cfea981609047d8043807a490a9d99b317faf7f",
+ "_from": "sorted-object@2.0.0",
+ "_npmVersion": "3.6.0",
+ "_nodeVersion": "5.7.1",
+ "_npmUser": {
+ "name": "domenic",
+ "email": "d@domenic.me"
+ },
+ "dist": {
+ "shasum": "1cfea981609047d8043807a490a9d99b317faf7f",
+ "tarball": "http://registry.npmjs.org/sorted-object/-/sorted-object-2.0.0.tgz"
+ },
+ "maintainers": [
+ {
+ "name": "domenic",
+ "email": "domenic@domenicdenicola.com"
+ }
+ ],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/sorted-object-2.0.0.tgz_1457910693572_0.6718082851730287"
+ },
+ "directories": {},
+ "_resolved": "https://registry.npmjs.org/sorted-object/-/sorted-object-2.0.0.tgz",
+ "readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/which/CHANGELOG.md b/deps/npm/node_modules/which/CHANGELOG.md
new file mode 100644
index 00000000000000..03879364f87804
--- /dev/null
+++ b/deps/npm/node_modules/which/CHANGELOG.md
@@ -0,0 +1,113 @@
+# Changes
+
+
+## v1.2.8
+
+* bullet items in changelog that contain (but don't start with) #
+
+## v1.2.7
+
+* strip 'update changelog' changelog entries out of changelog
+
+## v1.2.6
+
+* make the changelog bulleted
+
+## v1.2.5
+
+* make a changelog, and keep it up to date
+* don't include tests in package
+* Properly handle relative-path executables
+* appveyor
+* Attach error code to Not Found error
+* Make tests pass on Windows
+
+## v1.2.4
+
+* Fix typo
+
+## v1.2.3
+
+* update isexe, fix regression in pathExt handling
+
+## v1.2.2
+
+* update deps, use isexe module, test windows
+
+## v1.2.1
+
+* Sometimes windows PATH entries are quoted
+* Fixed a bug in the check for group and user mode bits. This bug was introduced during refactoring for supporting strict mode.
+* doc cli
+
+## v1.2.0
+
+* Add support for opt.all and -as cli flags
+* test the bin
+* update travis
+* Allow checking for multiple programs in bin/which
+* tap 2
+
+## v1.1.2
+
+* travis
+* Refactored and fixed undefined error on Windows
+* Support strict mode
+
+## v1.1.1
+
+* test +g exes against secondary groups, if available
+* Use windows exe semantics on cygwin & msys
+* cwd should be first in path on win32, not last
+* Handle lower-case 'env.Path' on Windows
+* Update docs
+* use single-quotes
+
+## v1.1.0
+
+* Add tests, depend on is-absolute
+
+## v1.0.9
+
+* which.js: root is allowed to execute files owned by anyone
+
+## v1.0.8
+
+* don't use graceful-fs
+
+## v1.0.7
+
+* add license to package.json
+
+## v1.0.6
+
+* isc license
+
+## 1.0.5
+
+* Awful typo
+
+## 1.0.4
+
+* Test for path absoluteness properly
+* win: Allow '' as a pathext if cmd has a . in it
+
+## 1.0.3
+
+* Remove references to execPath
+* Make `which.sync()` work on Windows by honoring the PATHEXT variable.
+* Make `isExe()` always return true on Windows.
+* MIT
+
+## 1.0.2
+
+* Only files can be exes
+
+## 1.0.1
+
+* Respect the PATHEXT env for win32 support
+* should 0755 the bin
+* binary
+* guts
+* package
+* 1st
diff --git a/deps/npm/node_modules/which/changelog.sh b/deps/npm/node_modules/which/changelog.sh
new file mode 100644
index 00000000000000..360e54af40f6af
--- /dev/null
+++ b/deps/npm/node_modules/which/changelog.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+(
+ echo '# Changes'
+ echo ''
+ git log --first-parent --pretty=format:'%s' \
+ | grep -v '^update changelog' \
+ | perl -p -e 's/^((v?[0-9]+\.?)+)$/\n## \1\n/g' \
+ | perl -p -e 's/^([^#\s].*)$/* \1/g'
+)> CHANGELOG.md
diff --git a/deps/npm/node_modules/which/node_modules/isexe/LICENSE b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
new file mode 100644
index 00000000000000..19129e315fe593
--- /dev/null
+++ b/deps/npm/node_modules/which/node_modules/isexe/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/which/node_modules/isexe/package.json b/deps/npm/node_modules/which/node_modules/isexe/package.json
index f5adcb683e8340..0787bc7599d469 100644
--- a/deps/npm/node_modules/which/node_modules/isexe/package.json
+++ b/deps/npm/node_modules/which/node_modules/isexe/package.json
@@ -1,6 +1,6 @@
{
"name": "isexe",
- "version": "1.1.1",
+ "version": "1.1.2",
"description": "Minimal module to check if a file is executable.",
"main": "index.js",
"directories": {
@@ -29,19 +29,19 @@
"url": "https://github.com/isaacs/isexe/issues"
},
"homepage": "https://github.com/isaacs/isexe#readme",
- "gitHead": "af83031caed58654ad9d20b98eb710d383618ad7",
- "_id": "isexe@1.1.1",
- "_shasum": "f0d4793ed2fb5c46bfdeab760bbb965f4485a66c",
+ "gitHead": "1882eed72c2ba152f4dd1336d857b0755ae306d9",
+ "_id": "isexe@1.1.2",
+ "_shasum": "36f3e22e60750920f5e7241a476a8c6a42275ad0",
"_from": "isexe@>=1.1.1 <2.0.0",
- "_npmVersion": "2.14.15",
+ "_npmVersion": "3.7.0",
"_nodeVersion": "4.0.0",
"_npmUser": {
"name": "isaacs",
"email": "i@izs.me"
},
"dist": {
- "shasum": "f0d4793ed2fb5c46bfdeab760bbb965f4485a66c",
- "tarball": "http://registry.npmjs.org/isexe/-/isexe-1.1.1.tgz"
+ "shasum": "36f3e22e60750920f5e7241a476a8c6a42275ad0",
+ "tarball": "http://registry.npmjs.org/isexe/-/isexe-1.1.2.tgz"
},
"maintainers": [
{
@@ -49,6 +49,10 @@
"email": "i@izs.me"
}
],
- "_resolved": "https://registry.npmjs.org/isexe/-/isexe-1.1.1.tgz",
+ "_npmOperationalInternal": {
+ "host": "packages-9-west.internal.npmjs.com",
+ "tmp": "tmp/isexe-1.1.2.tgz_1454992795963_0.7608721863944083"
+ },
+ "_resolved": "https://registry.npmjs.org/isexe/-/isexe-1.1.2.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
index f3b3fd32d91ac6..964ddcf56b4800 100644
--- a/deps/npm/node_modules/which/package.json
+++ b/deps/npm/node_modules/which/package.json
@@ -6,7 +6,7 @@
},
"name": "which",
"description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
- "version": "1.2.4",
+ "version": "1.2.8",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-which.git"
@@ -26,25 +26,31 @@
"tap": "^5.1.1"
},
"scripts": {
- "test": "tap test/*.js --cov"
+ "test": "tap test/*.js --cov",
+ "changelog": "bash changelog.sh",
+ "postversion": "npm run changelog && git add CHANGELOG.md && git commit -m 'update changelog - '${npm_package_version}"
},
- "gitHead": "1375684d40af9de2ecc527d1ab9b87b537d7a1cc",
+ "files": [
+ "which.js",
+ "bin/which"
+ ],
+ "gitHead": "e4de2c25e9163b1f55323792f0fc5806e948ffc1",
"bugs": {
"url": "https://github.com/isaacs/node-which/issues"
},
"homepage": "https://github.com/isaacs/node-which#readme",
- "_id": "which@1.2.4",
- "_shasum": "1557f96080604e5b11b3599eb9f45b50a9efd722",
- "_from": "which@1.2.4",
- "_npmVersion": "2.14.15",
- "_nodeVersion": "4.0.0",
+ "_id": "which@1.2.8",
+ "_shasum": "37fa9f6eab30e49b8ef6eea24681c5799d52ebd6",
+ "_from": "which@1.2.8",
+ "_npmVersion": "3.8.9",
+ "_nodeVersion": "5.6.0",
"_npmUser": {
"name": "isaacs",
"email": "i@izs.me"
},
"dist": {
- "shasum": "1557f96080604e5b11b3599eb9f45b50a9efd722",
- "tarball": "http://registry.npmjs.org/which/-/which-1.2.4.tgz"
+ "shasum": "37fa9f6eab30e49b8ef6eea24681c5799d52ebd6",
+ "tarball": "https://registry.npmjs.org/which/-/which-1.2.8.tgz"
},
"maintainers": [
{
@@ -52,7 +58,11 @@
"email": "i@izs.me"
}
],
+ "_npmOperationalInternal": {
+ "host": "packages-12-west.internal.npmjs.com",
+ "tmp": "tmp/which-1.2.8.tgz_1462472514341_0.746755798580125"
+ },
"directories": {},
- "_resolved": "https://registry.npmjs.org/which/-/which-1.2.4.tgz",
+ "_resolved": "https://registry.npmjs.org/which/-/which-1.2.8.tgz",
"readme": "ERROR: No README data found!"
}
diff --git a/deps/npm/node_modules/which/test/basic.js b/deps/npm/node_modules/which/test/basic.js
deleted file mode 100644
index 54c8d2384dc27f..00000000000000
--- a/deps/npm/node_modules/which/test/basic.js
+++ /dev/null
@@ -1,120 +0,0 @@
-var t = require('tap')
-var fs = require('fs')
-var rimraf = require('rimraf')
-var mkdirp = require('mkdirp')
-var fixture = __dirname + '/fixture'
-var which = require('../which.js')
-var path = require('path')
-
-var isWindows = process.platform === 'win32' ||
- process.env.OSTYPE === 'cygwin' ||
- process.env.OSTYPE === 'msys'
-
-var skip = { skip: isWindows ? 'not relevant on windows' : false }
-
-t.test('setup', function (t) {
- rimraf.sync(fixture)
- mkdirp.sync(fixture)
- fs.writeFileSync(fixture + '/foo.sh', 'echo foo\n')
- t.end()
-})
-
-t.test('does not find non-executable', skip, function (t) {
- t.plan(2)
-
- t.test('absolute', function (t) {
- t.plan(2)
- which(fixture + '/foo.sh', function (er) {
- t.isa(er, Error)
- })
-
- t.throws(function () {
- which.sync(fixture + '/foo.sh')
- })
- })
-
- t.test('with path', function (t) {
- t.plan(2)
- which('foo.sh', { path: fixture }, function (er) {
- t.isa(er, Error)
- })
-
- t.throws(function () {
- which.sync('foo.sh', { path: fixture })
- })
- })
-})
-
-t.test('make executable', function (t) {
- fs.chmodSync(fixture + '/foo.sh', '0755')
- t.end()
-})
-
-t.test('find when executable', function (t) {
- var opt = { pathExt: '.sh' }
- var expect = path.resolve(fixture, 'foo.sh').toLowerCase()
- var PATH = process.env.PATH || process.env.Path
-
- t.test('absolute', function (t) {
- runTest(fixture + '/foo.sh', t)
- })
-
- t.test('with process.env.PATH', function (t) {
- process.env.PATH = process.env.Path = fixture
- runTest('foo.sh', t)
- })
-
- t.test('with process.env.Path', {
- skip: isWindows ? false : 'Only for Windows'
- }, function (t) {
- process.env.PATH = ""
- process.env.Path = fixture
- runTest('foo.sh', t)
- })
-
- t.test('with pathExt', {
- skip: isWindows ? false : 'Only for Windows'
- }, function (t) {
- var pe = process.env.PATHEXT
- process.env.PATHEXT = '.SH'
-
- t.test('foo.sh', function (t) {
- runTest('foo.sh', t)
- })
- t.test('foo', function (t) {
- runTest('foo', t)
- })
- t.test('replace', function (t) {
- process.env.PATHEXT = pe
- t.end()
- })
- t.end()
- })
-
- t.test('with path opt', function (t) {
- opt.path = fixture
- runTest('foo.sh', t)
- })
-
- function runTest(exec, t) {
- t.plan(2)
-
- var found = which.sync(exec, opt).toLowerCase()
- t.equal(found, expect)
-
- which(exec, opt, function (er, found) {
- if (er)
- throw er
- t.equal(found.toLowerCase(), expect)
- t.end()
- process.env.PATH = PATH
- })
- }
-
- t.end()
-})
-
-t.test('clean', function (t) {
- rimraf.sync(fixture)
- t.end()
-})
diff --git a/deps/npm/node_modules/which/test/bin.js b/deps/npm/node_modules/which/test/bin.js
deleted file mode 100644
index ff7eb530c7fcc2..00000000000000
--- a/deps/npm/node_modules/which/test/bin.js
+++ /dev/null
@@ -1,119 +0,0 @@
-var t = require('tap')
-var spawn = require('child_process').spawn
-var node = process.execPath
-var bin = require.resolve('../bin/which')
-
-function which (args, extraPath, cb) {
- if (typeof extraPath === 'function')
- cb = extraPath, extraPath = null
-
- var options = {}
- if (extraPath) {
- var sep = process.platform === 'win32' ? ';' : ':'
- var p = process.env.PATH + sep + extraPath
- options.env = Object.keys(process.env).reduce(function (env, k) {
- if (!k.match(/^path$/i))
- env[k] = process.env[k]
- return env
- }, { PATH: p })
- }
-
- var out = ''
- var err = ''
- var child = spawn(node, [bin].concat(args), options)
- child.stdout.on('data', function (c) {
- out += c
- })
- child.stderr.on('data', function (c) {
- err += c
- })
- child.on('close', function (code, signal) {
- cb(code, signal, out.trim(), err.trim())
- })
-}
-
-t.test('finds node', function (t) {
- which('node', function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 0)
- t.equal(err, '')
- t.match(out, /[\\\/]node(\.exe)?$/)
- t.end()
- })
-})
-
-t.test('does not find flergyderp', function (t) {
- which('flergyderp', function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 1)
- t.equal(err, '')
- t.match(out, '')
- t.end()
- })
-})
-
-t.test('finds node and tap', function (t) {
- which(['node', 'tap'], function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 0)
- t.equal(err, '')
- t.match(out.split(/\n/), [
- /[\\\/]node(\.exe)?$/,
- /[\\\/]tap(\.cmd)?$/
- ])
- t.end()
- })
-})
-
-t.test('finds node and tap, but not flergyderp', function (t) {
- which(['node', 'flergyderp', 'tap'], function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 1)
- t.equal(err, '')
- t.match(out.split(/\n/), [
- /[\\\/]node(\.exe)?$/,
- /[\\\/]tap(\.cmd)?$/
- ])
- t.end()
- })
-})
-
-t.test('cli flags', function (t) {
- var p = require('path').dirname(bin)
- var cases = [ '-a', '-s', '-as', '-sa' ]
- t.plan(cases.length)
- cases.forEach(function (c) {
- t.test(c, function (t) {
- which(['which', c], p, function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 0)
- t.equal(err, '')
- if (/s/.test(c))
- t.equal(out, '', 'should be silent')
- else if (/a/.test(c))
- t.ok(out.split(/\n/).length > 1, 'should have more than 1 result')
- t.end()
- })
- })
- })
-})
-
-t.test('shows usage', function (t) {
- which([], function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 1)
- t.equal(err, 'usage: which [-as] program ...')
- t.equal(out, '')
- t.end()
- })
-})
-
-t.test('complains about unknown flag', function (t) {
- which(['node', '-sax'], function (code, signal, out, err) {
- t.equal(signal, null)
- t.equal(code, 1)
- t.equal(out, '')
- t.equal(err, 'which: illegal option -- x\nusage: which [-as] program ...')
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/which/test/windows.js b/deps/npm/node_modules/which/test/windows.js
deleted file mode 100644
index 1d5e4294a69357..00000000000000
--- a/deps/npm/node_modules/which/test/windows.js
+++ /dev/null
@@ -1,10 +0,0 @@
-// pretend to be Windows.
-if (process.platform === 'win32') {
- var t = require('tap')
- t.plan(0, 'already on windows')
- process.exit(0)
-}
-
-process.env.Path = process.env.PATH.split(':').join(';')
-process.env.OSTYPE = 'cygwin'
-require('./basic.js')
diff --git a/deps/npm/node_modules/which/which.js b/deps/npm/node_modules/which/which.js
index 5cf0124d7899b5..4ba14cd02f24cc 100644
--- a/deps/npm/node_modules/which/which.js
+++ b/deps/npm/node_modules/which/which.js
@@ -11,7 +11,14 @@ var isexe = require('isexe')
var fs = require('fs')
var isAbsolute = require('is-absolute')
-function getPathInfo(cmd, opt) {
+function getNotFoundError (cmd) {
+ var er = new Error('not found: ' + cmd)
+ er.code = 'ENOENT'
+
+ return er
+}
+
+function getPathInfo (cmd, opt) {
var colon = opt.colon || COLON
var pathEnv = opt.path || process.env.Path || process.env.PATH || ''
var pathExt = ['']
@@ -31,9 +38,9 @@ function getPathInfo(cmd, opt) {
pathExt.unshift('')
}
- // If it's absolute, then we don't bother searching the pathenv.
+ // If it has a slash, then we don't bother searching the pathenv.
// just check the file itself, and that's it.
- if (isAbsolute(cmd))
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
pathEnv = ['']
return {
@@ -60,14 +67,17 @@ function which (cmd, opt, cb) {
if (opt.all && found.length)
return cb(null, found)
else
- return cb(new Error('not found: '+cmd))
+ return cb(getNotFoundError(cmd))
}
var pathPart = pathEnv[i]
if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
pathPart = pathPart.slice(1, -1)
- var p = path.resolve(pathPart, cmd)
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && (/^\./).test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
;(function E (ii, ll) {
if (ii === ll) return F(i + 1, l)
var ext = pathExt[ii]
@@ -99,6 +109,9 @@ function whichSync (cmd, opt) {
pathPart = pathPart.slice(1, -1)
var p = path.join(pathPart, cmd)
+ if (!pathPart && (/^\./).test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
for (var j = 0, ll = pathExt.length; j < ll; j ++) {
var cur = p + pathExt[j]
var is
@@ -117,5 +130,5 @@ function whichSync (cmd, opt) {
if (opt.all && found.length)
return found
- throw new Error('not found: '+cmd)
+ throw getNotFoundError(cmd)
}
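Two of the which.js changes are visible to callers: a failed lookup now carries `err.code === 'ENOENT'`, and any command containing a path separator (not only an absolute path) is checked directly instead of being searched on PATH. A hedged sketch against the bundled which API (command names and paths are placeholders):

    var which = require('which')

    // Failures can be distinguished by code instead of by parsing the message:
    which('definitely-not-installed', function (err, resolved) {
      if (err && err.code === 'ENOENT') {
        console.log('not on PATH')
      } else if (err) {
        throw err
      } else {
        console.log('found at', resolved)
      }
    })

    // A relative command with a slash skips the PATH walk and is checked as-is:
    try {
      console.log(which.sync('./scripts/build.sh'))  // placeholder relative path
    } catch (err) {
      console.log(err.code)  // 'ENOENT' when the file is missing or not executable
    }
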
diff --git a/deps/npm/package.json b/deps/npm/package.json
index 5dee726325d98c..50ec15a47e955c 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,5 +1,5 @@
{
- "version": "2.15.4",
+ "version": "2.15.5",
"name": "npm",
"description": "a package manager for JavaScript",
"keywords": [
@@ -36,7 +36,7 @@
"ansistyles": "~0.1.3",
"archy": "~1.0.0",
"async-some": "~1.0.2",
- "block-stream": "0.0.8",
+ "block-stream": "0.0.9",
"char-spinner": "~1.0.1",
"chmodr": "~1.0.2",
"chownr": "~1.0.1",
@@ -52,14 +52,14 @@
"github-url-from-git": "~1.4.0",
"github-url-from-username-repo": "~1.0.2",
"glob": "~7.0.3",
- "graceful-fs": "~4.1.3",
+ "graceful-fs": "~4.1.4",
"hosted-git-info": "~2.1.4",
"inflight": "~1.0.4",
"inherits": "~2.0.1",
"ini": "~1.3.4",
"init-package-json": "~1.9.3",
"lockfile": "~1.0.1",
- "lru-cache": "~3.2.0",
+ "lru-cache": "~4.0.1",
"minimatch": "~3.0.0",
"mkdirp": "~0.5.1",
"node-gyp": "~3.3.1",
@@ -78,16 +78,16 @@
"path-is-inside": "~1.0.0",
"read": "~1.0.7",
"read-installed": "~4.0.3",
- "read-package-json": "~2.0.3",
- "readable-stream": "~1.1.13",
+ "read-package-json": "~2.0.4",
+ "readable-stream": "~2.1.2",
"realize-package-specifier": "~3.0.1",
- "request": "~2.69.0",
+ "request": "~2.72.0",
"retry": "~0.9.0",
"rimraf": "~2.5.2",
"semver": "~5.1.0",
"sha": "~2.0.1",
"slide": "~1.1.6",
- "sorted-object": "~1.0.0",
+ "sorted-object": "~2.0.0",
"spdx-license-ids": "~1.2.1",
"strip-ansi": "~3.0.1",
"tar": "~2.2.1",
@@ -96,7 +96,7 @@
"umask": "~1.1.0",
"validate-npm-package-license": "~3.0.1",
"validate-npm-package-name": "~2.2.2",
- "which": "~1.2.4",
+ "which": "~1.2.8",
"wrappy": "~1.0.1",
"write-file-atomic": "~1.1.4"
},
@@ -178,11 +178,11 @@
"marked": "~0.3.5",
"marked-man": "~0.1.5",
"npm-registry-couchapp": "~2.6.12",
- "npm-registry-mock": "~1.0.0",
- "require-inject": "~1.3.0",
- "sprintf-js": "~1.0.2",
- "tacks": "~1.0.9",
- "tap": "~2.3.1"
+ "npm-registry-mock": "~1.0.1",
+ "require-inject": "~1.3.1",
+ "sprintf-js": "~1.0.3",
+ "tacks": "~1.2.1",
+ "tap": "~5.7.1"
},
"scripts": {
"dumpconf": "env | grep npm | sort | uniq",