diff --git a/LICENSE b/LICENSE index f493fde3613eb5..8133c4c0bbb9df 100644 --- a/LICENSE +++ b/LICENSE @@ -186,33 +186,6 @@ The externally maintained libraries used by io.js are: */ """ -- HTTP Parser, located at deps/http_parser. HTTP Parser's license follows: - """ - http_parser.c is based on src/http/ngx_http_parse.c from NGINX copyright - Igor Sysoev. - - Additional changes are licensed under the same terms as NGINX and - copyright Joyent, Inc. and other Node contributors. All rights reserved. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to - deal in the Software without restriction, including without limitation the - rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - IN THE SOFTWARE. - """ - - Closure Linter is located at tools/closure_linter. 
Closure's license follows: """ diff --git a/Makefile b/Makefile index 51a950668d314e..e41f130788dbdb 100644 --- a/Makefile +++ b/Makefile @@ -39,7 +39,7 @@ $(NODE_G_EXE): config.gypi out/Makefile $(MAKE) -C out BUILDTYPE=Debug V=$(V) ln -fs out/Debug/$(NODE_EXE) $@ -out/Makefile: common.gypi deps/uv/uv.gyp deps/http_parser/http_parser.gyp deps/zlib/zlib.gyp deps/v8/build/toolchain.gypi deps/v8/build/features.gypi deps/v8/tools/gyp/v8.gyp node.gyp config.gypi +out/Makefile: common.gypi deps/uv/uv.gyp deps/zlib/zlib.gyp deps/v8/build/toolchain.gypi deps/v8/build/features.gypi deps/v8/tools/gyp/v8.gyp node.gyp config.gypi $(PYTHON) tools/gyp_node.py -f make config.gypi: configure diff --git a/Makefile.build b/Makefile.build index dad86cb517a9e6..8ede911ce16e03 100644 --- a/Makefile.build +++ b/Makefile.build @@ -233,7 +233,6 @@ NACL_ARCHES = nacl_ia32 nacl_x64 GYPFILES = \ common.gypi \ deps/cares/cares.gyp \ - deps/http_parser/http_parser.gyp \ deps/openssl/openssl.gyp \ deps/uv/uv.gyp \ deps/v8/tools/gyp/v8.gyp \ diff --git a/benchmark/http/parser.js b/benchmark/http/parser.js new file mode 100644 index 00000000000000..e00d8377b1cb36 --- /dev/null +++ b/benchmark/http/parser.js @@ -0,0 +1,199 @@ +var common = require('../common.js'); +var HTTPParser = require('_http_parser'); +var CRLF = '\r\n'; +var REQUEST = HTTPParser.REQUEST; +var RESPONSE = HTTPParser.RESPONSE; + +var bench = common.createBenchmark(main, { + n: [100000], + type: [ + 'small-req', + 'small-res', + 'small-alternate', + 'medium-req', + 'medium-res', + 'medium-alternate', + 'medium-req-chunked', + 'medium-res-chunked', + 'large-req', + 'large-res', + 'large-alternate', + 'large-req-chunked', + 'large-res-chunked', + ] +}); + +var inputs = { + 'small-req': [ + 'GET /index.html HTTP/1.1' + CRLF + + 'Host: www.example.com' + CRLF + CRLF + ], + 'small-res': [ + 'HTTP/1.1 200 OK' + CRLF + + 'Date: Mon, 23 May 2005 22:38:34 GMT' + CRLF + CRLF + ], + 'small-alternate': true, + 'medium-req': [ + 'POST /it 
HTTP/1.1' + CRLF + + 'Content-Type: text/plain' + CRLF + + 'Transfer-Encoding: chunked' + CRLF + + CRLF + + '3' + CRLF + + '123' + CRLF + + '6' + CRLF + + '123456' + CRLF + + 'A' + CRLF + + '1234567890' + CRLF + + '9' + CRLF + + '123456789' + CRLF + + 'C' + CRLF + + '123456789ABC' + CRLF + + 'F' + CRLF + + '123456789ABCDEF' + CRLF + + '0' + CRLF + ], + 'medium-res': [ + 'HTTP/1.0 200 OK' + CRLF + + 'Date: Mon, 23 May 2005 22:38:34 GMT' + CRLF + + 'Content-Type: text/plain' + CRLF + + 'Transfer-Encoding: chunked' + CRLF + + CRLF + + '3' + CRLF + + '123' + CRLF + + '6' + CRLF + + '123456' + CRLF + + 'A' + CRLF + + '1234567890' + CRLF + + '9' + CRLF + + '123456789' + CRLF + + 'C' + CRLF + + '123456789ABC' + CRLF + + 'F' + CRLF + + '123456789ABCDEF' + CRLF + + '0' + CRLF + ], + 'medium-alternate': true, + 'medium-req-chunked': [ + 'POST /it HTTP/', + '1.1' + CRLF, + 'Content-Type', + ': text', + '/plain', + CRLF, + 'Transfer-', + 'Encoding: chunked' + CRLF, + CRLF + '3' + CRLF + '123', + CRLF + '6' + CRLF + '123456' + CRLF + 'A' + CRLF, + '12345', + '67890' + CRLF, + '9' + CRLF + '123456789' + CRLF, + 'C' + CRLF + '123456789ABC' + CRLF + 'F' + CRLF + '123456789ABCDEF' + CRLF, + '0' + CRLF + ], + 'medium-res-chunked': [ + 'HTTP/1.0 2', + '00 OK' + CRLF + 'Date: Mo', + 'n, 23 May 2005 22', + ':38:34 GMT' + CRLF + 'Content-Type: text', + '/plain' + CRLF, + 'Transfer-Encoding: chu', + 'nked' + CRLF + CRLF + '3', + CRLF + '123' + CRLF + '6' + CRLF, + '123456' + CRLF + 'A' + CRLF + '1234567890' + CRLF, + '9' + CRLF, + '123456789' + CRLF, + 'C' + CRLF, + '123456789ABC' + CRLF, + 'F' + CRLF, + '123456789ABCDEF' + CRLF + '0' + CRLF + ], + 'large-req': [ + 'POST /foo/bar/baz?quux=42#1337 HTTP/1.0' + CRLF + + new Array(256).join('X-Filler: 42' + CRLF) + CRLF + ], + 'large-res': [ + 'HTTP/1.1 200 OK' + CRLF + + 'Content-Type: text/nonsense' + CRLF, + 'Content-Length: 3572' + CRLF + CRLF + + new Array(256).join('X-Filler: 42' + CRLF) + CRLF + ], + 'large-alternate': true, + 
'large-req-chunked': + ('POST /foo/bar/baz?quux=42#1337 HTTP/1.0' + CRLF + + new Array(256).join('X-Filler: 42' + CRLF) + CRLF).match(/.{1,144}/g) + , + 'large-res-chunked': + ('HTTP/1.1 200 OK' + CRLF + + 'Content-Type: text/nonsense' + CRLF + + 'Content-Length: 3572' + CRLF + CRLF + + new Array(256).join('X-Filler: 42' + CRLF) + CRLF).match(/.{1,144}/g) + , +}; + +function onHeaders(versionMajor, versionMinor, headers, method, url, statusCode, + statusMessage, upgrade, shouldKeepAlive) { +} +function onBody(data, start, len) { +} +function onComplete() { +} + +function main(conf) { + var type = conf.type; + var chunks = inputs[type]; + var n = +conf.n; + var nchunks = (chunks !== true ? chunks.length : 1); + var kind = (/\-req\-?/i.exec(type) ? REQUEST : RESPONSE); + var altsize = /^([^\-]+)\-alternate$/.exec(type); + var req; + var res; + + if (altsize) + altsize = altsize[1]; + + // Convert strings to Buffers first ... + if (chunks === true) { + // alternating + req = new Buffer(inputs[altsize + '-req'].join(''), 'binary'); + res = new Buffer(inputs[altsize + '-res'].join(''), 'binary'); + kind = REQUEST; + } else { + for (var i = 0; i < nchunks; ++i) + chunks[i] = new Buffer(chunks[i], 'binary'); + } + + var parser = new HTTPParser(kind); + parser.onHeaders = onHeaders; + parser.onBody = onBody; + parser.onComplete = onComplete; + + if (altsize) { + // Allow V8 to optimize first ... + for (var j = 0; j < 1000; ++j) { + parser.reinitialize(REQUEST); + parser.execute(req); + parser.reinitialize(RESPONSE); + parser.execute(res); + } + bench.start(); + for (var c = 0; c < n; ++c) { + parser.reinitialize(REQUEST); + parser.execute(req); + parser.reinitialize(RESPONSE); + parser.execute(res); + } + bench.end(n * 2); + } else { + // Allow V8 to optimize first ... 
+ for (var j = 0; j < 1000; ++j) { + for (var i = 0; i < nchunks; ++i) + parser.execute(chunks[i]); + } + bench.start(); + for (var c = 0; c < n; ++c) { + for (var i = 0; i < nchunks; ++i) + parser.execute(chunks[i]); + } + bench.end(n * nchunks); + } +} diff --git a/configure b/configure index 3f7992e0e4d482..32b02170221bbb 100755 --- a/configure +++ b/configure @@ -93,27 +93,6 @@ parser.add_option('--openssl-fips', dest='openssl_fips', help='Build OpenSSL using FIPS canister .o file in supplied folder') -shared_optgroup.add_option('--shared-http-parser', - action='store_true', - dest='shared_http_parser', - help='link to a shared http_parser DLL instead of static linking') - -shared_optgroup.add_option('--shared-http-parser-includes', - action='store', - dest='shared_http_parser_includes', - help='directory containing http_parser header files') - -shared_optgroup.add_option('--shared-http-parser-libname', - action='store', - dest='shared_http_parser_libname', - default='http_parser', - help='alternative lib name to link to [default: %default]') - -shared_optgroup.add_option('--shared-http-parser-libpath', - action='store', - dest='shared_http_parser_libpath', - help='a directory to search for the shared http_parser DLL') - shared_optgroup.add_option('--shared-libuv', action='store_true', dest='shared_libuv', @@ -1017,7 +996,6 @@ flavor = GetFlavor(flavor_params) configure_node(output) configure_library('zlib', output) -configure_library('http_parser', output) configure_library('libuv', output) configure_v8(output) configure_openssl(output) diff --git a/deps/http_parser/.gitignore b/deps/http_parser/.gitignore deleted file mode 100644 index 32cb51b2d3f6f6..00000000000000 --- a/deps/http_parser/.gitignore +++ /dev/null @@ -1,28 +0,0 @@ -/out/ -core -tags -*.o -test -test_g -test_fast -bench -url_parser -parsertrace -parsertrace_g -*.mk -*.Makefile -*.so.* -*.a - - -# Visual Studio uglies -*.suo -*.sln -*.vcxproj -*.vcxproj.filters -*.vcxproj.user -*.opensdf 
-*.ncrunchsolution* -*.sdf -*.vsp -*.psess diff --git a/deps/http_parser/.mailmap b/deps/http_parser/.mailmap deleted file mode 100644 index 278d1412637240..00000000000000 --- a/deps/http_parser/.mailmap +++ /dev/null @@ -1,8 +0,0 @@ -# update AUTHORS with: -# git log --all --reverse --format='%aN <%aE>' | perl -ne 'BEGIN{print "# Authors ordered by first contribution.\n"} print unless $h{$_}; $h{$_} = 1' > AUTHORS -Ryan Dahl -Salman Haq -Simon Zimmermann -Thomas LE ROUX LE ROUX Thomas -Thomas LE ROUX Thomas LE ROUX -Fedor Indutny diff --git a/deps/http_parser/.travis.yml b/deps/http_parser/.travis.yml deleted file mode 100644 index 4b038e6e62d638..00000000000000 --- a/deps/http_parser/.travis.yml +++ /dev/null @@ -1,13 +0,0 @@ -language: c - -compiler: - - clang - - gcc - -script: - - "make" - -notifications: - email: false - irc: - - "irc.freenode.net#node-ci" diff --git a/deps/http_parser/AUTHORS b/deps/http_parser/AUTHORS deleted file mode 100644 index 8e2df1d06e6f69..00000000000000 --- a/deps/http_parser/AUTHORS +++ /dev/null @@ -1,67 +0,0 @@ -# Authors ordered by first contribution. 
-Ryan Dahl -Jeremy Hinegardner -Sergey Shepelev -Joe Damato -tomika -Phoenix Sol -Cliff Frey -Ewen Cheslack-Postava -Santiago Gala -Tim Becker -Jeff Terrace -Ben Noordhuis -Nathan Rajlich -Mark Nottingham -Aman Gupta -Tim Becker -Sean Cunningham -Peter Griess -Salman Haq -Cliff Frey -Jon Kolb -Fouad Mardini -Paul Querna -Felix Geisendörfer -koichik -Andre Caron -Ivo Raisr -James McLaughlin -David Gwynne -Thomas LE ROUX -Randy Rizun -Andre Louis Caron -Simon Zimmermann -Erik Dubbelboer -Martell Malone -Bertrand Paquet -BogDan Vatra -Peter Faiman -Corey Richardson -Tóth Tamás -Cam Swords -Chris Dickinson -Uli Köhler -Charlie Somerville -Patrik Stutz -Fedor Indutny -runner -Alexis Campailla -David Wragg -Vinnie Falco -Alex Butum -Rex Feng -Alex Kocharin -Mark Koopman -Helge Heß -Alexis La Goutte -George Miroshnykov -Maciej Małecki -Marc O'Morain -Jeff Pinner -Timothy J Fontaine -Akagi201 -Romain Giraud -Jay Satiro -Arne Steen -Kjell Schubert diff --git a/deps/http_parser/LICENSE-MIT b/deps/http_parser/LICENSE-MIT deleted file mode 100644 index 58010b388945f9..00000000000000 --- a/deps/http_parser/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -http_parser.c is based on src/http/ngx_http_parse.c from NGINX copyright -Igor Sysoev. - -Additional changes are licensed under the same terms as NGINX and -copyright Joyent, Inc. and other Node contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/deps/http_parser/Makefile b/deps/http_parser/Makefile deleted file mode 100644 index 373709c6672e31..00000000000000 --- a/deps/http_parser/Makefile +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright Joyent, Inc. and other Node contributors. All rights reserved. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. - -PLATFORM ?= $(shell sh -c 'uname -s | tr "[A-Z]" "[a-z]"') -SONAME ?= libhttp_parser.so.2.5.0 - -CC?=gcc -AR?=ar - -CPPFLAGS ?= -LDFLAGS ?= - -CPPFLAGS += -I. 
-CPPFLAGS_DEBUG = $(CPPFLAGS) -DHTTP_PARSER_STRICT=1 -CPPFLAGS_DEBUG += $(CPPFLAGS_DEBUG_EXTRA) -CPPFLAGS_FAST = $(CPPFLAGS) -DHTTP_PARSER_STRICT=0 -CPPFLAGS_FAST += $(CPPFLAGS_FAST_EXTRA) -CPPFLAGS_BENCH = $(CPPFLAGS_FAST) - -CFLAGS += -Wall -Wextra -Werror -CFLAGS_DEBUG = $(CFLAGS) -O0 -g $(CFLAGS_DEBUG_EXTRA) -CFLAGS_FAST = $(CFLAGS) -O3 $(CFLAGS_FAST_EXTRA) -CFLAGS_BENCH = $(CFLAGS_FAST) -Wno-unused-parameter -CFLAGS_LIB = $(CFLAGS_FAST) -fPIC - -LDFLAGS_LIB = $(LDFLAGS) -shared - -INSTALL ?= install -PREFIX ?= $(DESTDIR)/usr/local -LIBDIR = $(PREFIX)/lib -INCLUDEDIR = $(PREFIX)/include - -ifneq (darwin,$(PLATFORM)) -# TODO(bnoordhuis) The native SunOS linker expects -h rather than -soname... -LDFLAGS_LIB += -Wl,-soname=$(SONAME) -endif - -test: test_g test_fast - ./test_g - ./test_fast - -test_g: http_parser_g.o test_g.o - $(CC) $(CFLAGS_DEBUG) $(LDFLAGS) http_parser_g.o test_g.o -o $@ - -test_g.o: test.c http_parser.h Makefile - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c test.c -o $@ - -http_parser_g.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) -c http_parser.c -o $@ - -test_fast: http_parser.o test.o http_parser.h - $(CC) $(CFLAGS_FAST) $(LDFLAGS) http_parser.o test.o -o $@ - -test.o: test.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c test.c -o $@ - -bench: http_parser.o bench.o - $(CC) $(CFLAGS_BENCH) $(LDFLAGS) http_parser.o bench.o -o $@ - -bench.o: bench.c http_parser.h Makefile - $(CC) $(CPPFLAGS_BENCH) $(CFLAGS_BENCH) -c bench.c -o $@ - -http_parser.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) -c http_parser.c - -test-run-timed: test_fast - while(true) do time ./test_fast > /dev/null; done - -test-valgrind: test_g - valgrind ./test_g - -libhttp_parser.o: http_parser.c http_parser.h Makefile - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_LIB) -c http_parser.c -o libhttp_parser.o - -library: libhttp_parser.o - $(CC) $(LDFLAGS_LIB) -o $(SONAME) $< - -package: http_parser.o - 
$(AR) rcs libhttp_parser.a http_parser.o - -url_parser: http_parser.o contrib/url_parser.c - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o $@ - -url_parser_g: http_parser_g.o contrib/url_parser.c - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o $@ - -parsertrace: http_parser.o contrib/parsertrace.c - $(CC) $(CPPFLAGS_FAST) $(CFLAGS_FAST) $^ -o parsertrace - -parsertrace_g: http_parser_g.o contrib/parsertrace.c - $(CC) $(CPPFLAGS_DEBUG) $(CFLAGS_DEBUG) $^ -o parsertrace_g - -tags: http_parser.c http_parser.h test.c - ctags $^ - -install: library - $(INSTALL) -D http_parser.h $(INCLUDEDIR)/http_parser.h - $(INSTALL) -D $(SONAME) $(LIBDIR)/$(SONAME) - ln -s $(LIBDIR)/$(SONAME) $(LIBDIR)/libhttp_parser.so - -install-strip: library - $(INSTALL) -D http_parser.h $(INCLUDEDIR)/http_parser.h - $(INSTALL) -D -s $(SONAME) $(LIBDIR)/$(SONAME) - ln -s $(LIBDIR)/$(SONAME) $(LIBDIR)/libhttp_parser.so - -uninstall: - rm $(INCLUDEDIR)/http_parser.h - rm $(LIBDIR)/$(SONAME) - rm $(LIBDIR)/libhttp_parser.so - -clean: - rm -f *.o *.a tags test test_fast test_g \ - http_parser.tar libhttp_parser.so.* \ - url_parser url_parser_g parsertrace parsertrace_g - -contrib/url_parser.c: http_parser.h -contrib/parsertrace.c: http_parser.h - -.PHONY: clean package test-run test-run-timed test-valgrind install install-strip uninstall diff --git a/deps/http_parser/README.md b/deps/http_parser/README.md deleted file mode 100644 index 7c54dd42d087c3..00000000000000 --- a/deps/http_parser/README.md +++ /dev/null @@ -1,183 +0,0 @@ -HTTP Parser -=========== - -[![Build Status](https://travis-ci.org/joyent/http-parser.png?branch=master)](https://travis-ci.org/joyent/http-parser) - -This is a parser for HTTP messages written in C. It parses both requests and -responses. The parser is designed to be used in performance HTTP -applications. It does not make any syscalls nor allocations, it does not -buffer data, it can be interrupted at anytime. 
Depending on your -architecture, it only requires about 40 bytes of data per message -stream (in a web server that is per connection). - -Features: - - * No dependencies - * Handles persistent streams (keep-alive). - * Decodes chunked encoding. - * Upgrade support - * Defends against buffer overflow attacks. - -The parser extracts the following information from HTTP messages: - - * Header fields and values - * Content-Length - * Request method - * Response status code - * Transfer-Encoding - * HTTP version - * Request URL - * Message body - - -Usage ------ - -One `http_parser` object is used per TCP connection. Initialize the struct -using `http_parser_init()` and set the callbacks. That might look something -like this for a request parser: -```c -http_parser_settings settings; -settings.on_url = my_url_callback; -settings.on_header_field = my_header_field_callback; -/* ... */ - -http_parser *parser = malloc(sizeof(http_parser)); -http_parser_init(parser, HTTP_REQUEST); -parser->data = my_socket; -``` - -When data is received on the socket execute the parser and check for errors. - -```c -size_t len = 80*1024, nparsed; -char buf[len]; -ssize_t recved; - -recved = recv(fd, buf, len, 0); - -if (recved < 0) { - /* Handle error. */ -} - -/* Start up / continue the parser. - * Note we pass recved==0 to signal that EOF has been received. - */ -nparsed = http_parser_execute(parser, &settings, buf, recved); - -if (parser->upgrade) { - /* handle new protocol */ -} else if (nparsed != recved) { - /* Handle error. Usually just close the connection. */ -} -``` - -HTTP needs to know where the end of the stream is. For example, sometimes -servers send responses without Content-Length and expect the client to -consume input (for the body) until EOF. To tell http_parser about EOF, give -`0` as the fourth parameter to `http_parser_execute()`. Callbacks and errors -can still be encountered during an EOF, so one must still be prepared -to receive them. 
- -Scalar valued message information such as `status_code`, `method`, and the -HTTP version are stored in the parser structure. This data is only -temporally stored in `http_parser` and gets reset on each new message. If -this information is needed later, copy it out of the structure during the -`headers_complete` callback. - -The parser decodes the transfer-encoding for both requests and responses -transparently. That is, a chunked encoding is decoded before being sent to -the on_body callback. - - -The Special Problem of Upgrade ------------------------------- - -HTTP supports upgrading the connection to a different protocol. An -increasingly common example of this is the Web Socket protocol which sends -a request like - - GET /demo HTTP/1.1 - Upgrade: WebSocket - Connection: Upgrade - Host: example.com - Origin: http://example.com - WebSocket-Protocol: sample - -followed by non-HTTP data. - -(See http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-75 for more -information the Web Socket protocol.) - -To support this, the parser will treat this as a normal HTTP message without a -body, issuing both on_headers_complete and on_message_complete callbacks. However -http_parser_execute() will stop parsing at the end of the headers and return. - -The user is expected to check if `parser->upgrade` has been set to 1 after -`http_parser_execute()` returns. Non-HTTP data begins at the buffer supplied -offset by the return value of `http_parser_execute()`. - - -Callbacks ---------- - -During the `http_parser_execute()` call, the callbacks set in -`http_parser_settings` will be executed. The parser maintains state and -never looks behind, so buffering the data is not necessary. If you need to -save certain data for later usage, you can do that from the callbacks. - -There are two types of callbacks: - -* notification `typedef int (*http_cb) (http_parser*);` - Callbacks: on_message_begin, on_headers_complete, on_message_complete. 
-* data `typedef int (*http_data_cb) (http_parser*, const char *at, size_t length);` - Callbacks: (requests only) on_url, - (common) on_header_field, on_header_value, on_body; - -Callbacks must return 0 on success. Returning a non-zero value indicates -error to the parser, making it exit immediately. - -In case you parse HTTP message in chunks (i.e. `read()` request line -from socket, parse, read half headers, parse, etc) your data callbacks -may be called more than once. Http-parser guarantees that data pointer is only -valid for the lifetime of callback. You can also `read()` into a heap allocated -buffer to avoid copying memory around if this fits your application. - -Reading headers may be a tricky task if you read/parse headers partially. -Basically, you need to remember whether last header callback was field or value -and apply the following logic: - - (on_header_field and on_header_value shortened to on_h_*) - ------------------------ ------------ -------------------------------------------- - | State (prev. callback) | Callback | Description/action | - ------------------------ ------------ -------------------------------------------- - | nothing (first call) | on_h_field | Allocate new buffer and copy callback data | - | | | into it | - ------------------------ ------------ -------------------------------------------- - | value | on_h_field | New header started. | - | | | Copy current name,value buffers to headers | - | | | list and allocate new buffer for new name | - ------------------------ ------------ -------------------------------------------- - | field | on_h_field | Previous name continues. Reallocate name | - | | | buffer and append callback data to it | - ------------------------ ------------ -------------------------------------------- - | field | on_h_value | Value for current header started. 
Allocate | - | | | new buffer and copy callback data to it | - ------------------------ ------------ -------------------------------------------- - | value | on_h_value | Value continues. Reallocate value buffer | - | | | and append callback data to it | - ------------------------ ------------ -------------------------------------------- - - -Parsing URLs ------------- - -A simplistic zero-copy URL parser is provided as `http_parser_parse_url()`. -Users of this library may wish to use it to parse URLs constructed from -consecutive `on_url` callbacks. - -See examples of reading in headers: - -* [partial example](http://gist.github.com/155877) in C -* [from http-parser tests](http://github.com/joyent/http-parser/blob/37a0ff8/test.c#L403) in C -* [from Node library](http://github.com/joyent/node/blob/842eaf4/src/http.js#L284) in Javascript diff --git a/deps/http_parser/contrib/parsertrace.c b/deps/http_parser/contrib/parsertrace.c deleted file mode 100644 index e7153680f467de..00000000000000 --- a/deps/http_parser/contrib/parsertrace.c +++ /dev/null @@ -1,160 +0,0 @@ -/* Based on src/http/ngx_http_parse.c from NGINX copyright Igor Sysoev - * - * Additional changes are licensed under the same terms as NGINX and - * copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ - -/* Dump what the parser finds to stdout as it happen */ - -#include "http_parser.h" -#include -#include -#include - -int on_message_begin(http_parser* _) { - (void)_; - printf("\n***MESSAGE BEGIN***\n\n"); - return 0; -} - -int on_headers_complete(http_parser* _) { - (void)_; - printf("\n***HEADERS COMPLETE***\n\n"); - return 0; -} - -int on_message_complete(http_parser* _) { - (void)_; - printf("\n***MESSAGE COMPLETE***\n\n"); - return 0; -} - -int on_url(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Url: %.*s\n", (int)length, at); - return 0; -} - -int on_header_field(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Header field: %.*s\n", (int)length, at); - return 0; -} - -int on_header_value(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Header value: %.*s\n", (int)length, at); - return 0; -} - -int on_body(http_parser* _, const char* at, size_t length) { - (void)_; - printf("Body: %.*s\n", (int)length, at); - return 0; -} - -void usage(const char* name) { - fprintf(stderr, - "Usage: %s $type $filename\n" - " type: -x, where x is one of {r,b,q}\n" - " parses file as a Response, reQuest, or Both\n", - name); - exit(EXIT_FAILURE); -} - -int main(int argc, char* argv[]) { - enum http_parser_type file_type; - - if (argc != 3) { - usage(argv[0]); - } - - char* type = argv[1]; - if (type[0] != '-') { - usage(argv[0]); - } - - switch (type[1]) { - /* in the case of "-", type[1] will be NUL */ - case 'r': - 
file_type = HTTP_RESPONSE; - break; - case 'q': - file_type = HTTP_REQUEST; - break; - case 'b': - file_type = HTTP_BOTH; - break; - default: - usage(argv[0]); - } - - char* filename = argv[2]; - FILE* file = fopen(filename, "r"); - if (file == NULL) { - perror("fopen"); - goto fail; - } - - fseek(file, 0, SEEK_END); - long file_length = ftell(file); - if (file_length == -1) { - perror("ftell"); - goto fail; - } - fseek(file, 0, SEEK_SET); - - char* data = malloc(file_length); - if (fread(data, 1, file_length, file) != (size_t)file_length) { - fprintf(stderr, "couldn't read entire file\n"); - free(data); - goto fail; - } - - http_parser_settings settings; - memset(&settings, 0, sizeof(settings)); - settings.on_message_begin = on_message_begin; - settings.on_url = on_url; - settings.on_header_field = on_header_field; - settings.on_header_value = on_header_value; - settings.on_headers_complete = on_headers_complete; - settings.on_body = on_body; - settings.on_message_complete = on_message_complete; - - http_parser parser; - http_parser_init(&parser, file_type); - size_t nparsed = http_parser_execute(&parser, &settings, data, file_length); - free(data); - - if (nparsed != (size_t)file_length) { - fprintf(stderr, - "Error: %s (%s)\n", - http_errno_description(HTTP_PARSER_ERRNO(&parser)), - http_errno_name(HTTP_PARSER_ERRNO(&parser))); - goto fail; - } - - return EXIT_SUCCESS; - -fail: - fclose(file); - return EXIT_FAILURE; -} diff --git a/deps/http_parser/contrib/url_parser.c b/deps/http_parser/contrib/url_parser.c deleted file mode 100644 index 6650b414af9065..00000000000000 --- a/deps/http_parser/contrib/url_parser.c +++ /dev/null @@ -1,46 +0,0 @@ -#include "http_parser.h" -#include -#include - -void -dump_url (const char *url, const struct http_parser_url *u) -{ - unsigned int i; - - printf("\tfield_set: 0x%x, port: %u\n", u->field_set, u->port); - for (i = 0; i < UF_MAX; i++) { - if ((u->field_set & (1 << i)) == 0) { - printf("\tfield_data[%u]: unset\n", i); - 
continue; - } - - printf("\tfield_data[%u]: off: %u, len: %u, part: %.*s\n", - i, - u->field_data[i].off, - u->field_data[i].len, - u->field_data[i].len, - url + u->field_data[i].off); - } -} - -int main(int argc, char ** argv) { - struct http_parser_url u; - int len, connect, result; - - if (argc != 3) { - printf("Syntax : %s connect|get url\n", argv[0]); - return 1; - } - len = strlen(argv[2]); - connect = strcmp("connect", argv[1]) == 0 ? 1 : 0; - printf("Parsing %s, connect %d\n", argv[2], connect); - - result = http_parser_parse_url(argv[2], len, connect, &u); - if (result != 0) { - printf("Parse error : %d\n", result); - return result; - } - printf("Parse ok, result : \n"); - dump_url(argv[2], &u); - return 0; -} \ No newline at end of file diff --git a/deps/http_parser/http_parser.c b/deps/http_parser/http_parser.c deleted file mode 100644 index 0fa1c362729c4f..00000000000000 --- a/deps/http_parser/http_parser.c +++ /dev/null @@ -1,2429 +0,0 @@ -/* Based on src/http/ngx_http_parse.c from NGINX copyright Igor Sysoev - * - * Additional changes are licensed under the same terms as NGINX and - * copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#include "http_parser.h" -#include -#include -#include -#include -#include -#include - -#ifndef ULLONG_MAX -# define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */ -#endif - -#ifndef MIN -# define MIN(a,b) ((a) < (b) ? (a) : (b)) -#endif - -#ifndef ARRAY_SIZE -# define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) -#endif - -#ifndef BIT_AT -# define BIT_AT(a, i) \ - (!!((unsigned int) (a)[(unsigned int) (i) >> 3] & \ - (1 << ((unsigned int) (i) & 7)))) -#endif - -#ifndef ELEM_AT -# define ELEM_AT(a, i, v) ((unsigned int) (i) < ARRAY_SIZE(a) ? 
(a)[(i)] : (v)) -#endif - -#define SET_ERRNO(e) \ -do { \ - parser->http_errno = (e); \ -} while(0) - -#define CURRENT_STATE() p_state -#define UPDATE_STATE(V) p_state = (enum state) (V); -#define RETURN(V) \ -do { \ - parser->state = CURRENT_STATE(); \ - return (V); \ -} while (0); -#define REEXECUTE() \ - goto reexecute; \ - - -#ifdef __GNUC__ -# define LIKELY(X) __builtin_expect(!!(X), 1) -# define UNLIKELY(X) __builtin_expect(!!(X), 0) -#else -# define LIKELY(X) (X) -# define UNLIKELY(X) (X) -#endif - - -/* Run the notify callback FOR, returning ER if it fails */ -#define CALLBACK_NOTIFY_(FOR, ER) \ -do { \ - assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ - \ - if (LIKELY(settings->on_##FOR)) { \ - parser->state = CURRENT_STATE(); \ - if (UNLIKELY(0 != settings->on_##FOR(parser))) { \ - SET_ERRNO(HPE_CB_##FOR); \ - } \ - UPDATE_STATE(parser->state); \ - \ - /* We either errored above or got paused; get out */ \ - if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ - return (ER); \ - } \ - } \ -} while (0) - -/* Run the notify callback FOR and consume the current byte */ -#define CALLBACK_NOTIFY(FOR) CALLBACK_NOTIFY_(FOR, p - data + 1) - -/* Run the notify callback FOR and don't consume the current byte */ -#define CALLBACK_NOTIFY_NOADVANCE(FOR) CALLBACK_NOTIFY_(FOR, p - data) - -/* Run data callback FOR with LEN bytes, returning ER if it fails */ -#define CALLBACK_DATA_(FOR, LEN, ER) \ -do { \ - assert(HTTP_PARSER_ERRNO(parser) == HPE_OK); \ - \ - if (FOR##_mark) { \ - if (LIKELY(settings->on_##FOR)) { \ - parser->state = CURRENT_STATE(); \ - if (UNLIKELY(0 != \ - settings->on_##FOR(parser, FOR##_mark, (LEN)))) { \ - SET_ERRNO(HPE_CB_##FOR); \ - } \ - UPDATE_STATE(parser->state); \ - \ - /* We either errored above or got paused; get out */ \ - if (UNLIKELY(HTTP_PARSER_ERRNO(parser) != HPE_OK)) { \ - return (ER); \ - } \ - } \ - FOR##_mark = NULL; \ - } \ -} while (0) - -/* Run the data callback FOR and consume the current byte */ -#define CALLBACK_DATA(FOR) 
\ - CALLBACK_DATA_(FOR, p - FOR##_mark, p - data + 1) - -/* Run the data callback FOR and don't consume the current byte */ -#define CALLBACK_DATA_NOADVANCE(FOR) \ - CALLBACK_DATA_(FOR, p - FOR##_mark, p - data) - -/* Set the mark FOR; non-destructive if mark is already set */ -#define MARK(FOR) \ -do { \ - if (!FOR##_mark) { \ - FOR##_mark = p; \ - } \ -} while (0) - -/* Don't allow the total size of the HTTP headers (including the status - * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect - * embedders against denial-of-service attacks where the attacker feeds - * us a never-ending header that the embedder keeps buffering. - * - * This check is arguably the responsibility of embedders but we're doing - * it on the embedder's behalf because most won't bother and this way we - * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger - * than any reasonable request or response so this should never affect - * day-to-day operation. - */ -#define COUNT_HEADER_SIZE(V) \ -do { \ - parser->nread += (V); \ - if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) { \ - SET_ERRNO(HPE_HEADER_OVERFLOW); \ - goto error; \ - } \ -} while (0) - - -#define PROXY_CONNECTION "proxy-connection" -#define CONNECTION "connection" -#define CONTENT_LENGTH "content-length" -#define TRANSFER_ENCODING "transfer-encoding" -#define UPGRADE "upgrade" -#define CHUNKED "chunked" -#define KEEP_ALIVE "keep-alive" -#define CLOSE "close" - - -static const char *method_strings[] = - { -#define XX(num, name, string) #string, - HTTP_METHOD_MAP(XX) -#undef XX - }; - - -/* Tokens as defined by rfc 2616. Also lowercases them. - * token = 1* - * separators = "(" | ")" | "<" | ">" | "@" - * | "," | ";" | ":" | "\" | <"> - * | "/" | "[" | "]" | "?" 
| "=" - * | "{" | "}" | SP | HT - */ -static const char tokens[256] = { -/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ - 0, 0, 0, 0, 0, 0, 0, 0, -/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ - 0, '!', 0, '#', '$', '%', '&', '\'', -/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ - 0, 0, '*', '+', 0, '-', '.', 0, -/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ - '0', '1', '2', '3', '4', '5', '6', '7', -/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ - '8', '9', 0, 0, 0, 0, 0, 0, -/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ - 0, 'a', 'b', 'c', 'd', 'e', 'f', 'g', -/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ - 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', -/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ - 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', -/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ - 'x', 'y', 'z', 0, 0, 0, '^', '_', -/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ - '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', -/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ - 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', -/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ - 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', -/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ - 'x', 'y', 'z', 0, '|', 0, '~', 0 }; - - -static const int8_t unhex[256] = - {-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - , 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1,-1,-1,-1,-1 - ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1 - ,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1 - }; - - -#if HTTP_PARSER_STRICT -# define T(v) 0 -#else -# define T(v) v -#endif - 
- -static const uint8_t normal_url_char[32] = { -/* 0 nul 1 soh 2 stx 3 etx 4 eot 5 enq 6 ack 7 bel */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 8 bs 9 ht 10 nl 11 vt 12 np 13 cr 14 so 15 si */ - 0 | T(2) | 0 | 0 | T(16) | 0 | 0 | 0, -/* 16 dle 17 dc1 18 dc2 19 dc3 20 dc4 21 nak 22 syn 23 etb */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 24 can 25 em 26 sub 27 esc 28 fs 29 gs 30 rs 31 us */ - 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0, -/* 32 sp 33 ! 34 " 35 # 36 $ 37 % 38 & 39 ' */ - 0 | 2 | 4 | 0 | 16 | 32 | 64 | 128, -/* 40 ( 41 ) 42 * 43 + 44 , 45 - 46 . 47 / */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 48 0 49 1 50 2 51 3 52 4 53 5 54 6 55 7 */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 56 8 57 9 58 : 59 ; 60 < 61 = 62 > 63 ? */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, -/* 64 @ 65 A 66 B 67 C 68 D 69 E 70 F 71 G */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 72 H 73 I 74 J 75 K 76 L 77 M 78 N 79 O */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 80 P 81 Q 82 R 83 S 84 T 85 U 86 V 87 W */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 88 X 89 Y 90 Z 91 [ 92 \ 93 ] 94 ^ 95 _ */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 96 ` 97 a 98 b 99 c 100 d 101 e 102 f 103 g */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 104 h 105 i 106 j 107 k 108 l 109 m 110 n 111 o */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 112 p 113 q 114 r 115 s 116 t 117 u 118 v 119 w */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 128, -/* 120 x 121 y 122 z 123 { 124 | 125 } 126 ~ 127 del */ - 1 | 2 | 4 | 8 | 16 | 32 | 64 | 0, }; - -#undef T - -enum state - { s_dead = 1 /* important that this is > 0 */ - - , s_start_req_or_res - , s_res_or_resp_H - , s_start_res - , s_res_H - , s_res_HT - , s_res_HTT - , s_res_HTTP - , s_res_first_http_major - , s_res_http_major - , s_res_first_http_minor - , s_res_http_minor - , s_res_first_status_code - , s_res_status_code - , s_res_status_start - , s_res_status - , s_res_line_almost_done - - , s_start_req - - , s_req_method - , s_req_spaces_before_url - , s_req_schema - , s_req_schema_slash - , 
s_req_schema_slash_slash - , s_req_server_start - , s_req_server - , s_req_server_with_at - , s_req_path - , s_req_query_string_start - , s_req_query_string - , s_req_fragment_start - , s_req_fragment - , s_req_http_start - , s_req_http_H - , s_req_http_HT - , s_req_http_HTT - , s_req_http_HTTP - , s_req_first_http_major - , s_req_http_major - , s_req_first_http_minor - , s_req_http_minor - , s_req_line_almost_done - - , s_header_field_start - , s_header_field - , s_header_value_discard_ws - , s_header_value_discard_ws_almost_done - , s_header_value_discard_lws - , s_header_value_start - , s_header_value - , s_header_value_lws - - , s_header_almost_done - - , s_chunk_size_start - , s_chunk_size - , s_chunk_parameters - , s_chunk_size_almost_done - - , s_headers_almost_done - , s_headers_done - - /* Important: 's_headers_done' must be the last 'header' state. All - * states beyond this must be 'body' states. It is used for overflow - * checking. See the PARSING_HEADER() macro. - */ - - , s_chunk_data - , s_chunk_data_almost_done - , s_chunk_data_done - - , s_body_identity - , s_body_identity_eof - - , s_message_done - }; - - -#define PARSING_HEADER(state) (state <= s_headers_done) - - -enum header_states - { h_general = 0 - , h_C - , h_CO - , h_CON - - , h_matching_connection - , h_matching_proxy_connection - , h_matching_content_length - , h_matching_transfer_encoding - , h_matching_upgrade - - , h_connection - , h_content_length - , h_transfer_encoding - , h_upgrade - - , h_matching_transfer_encoding_chunked - , h_matching_connection_token_start - , h_matching_connection_keep_alive - , h_matching_connection_close - , h_matching_connection_upgrade - , h_matching_connection_token - - , h_transfer_encoding_chunked - , h_connection_keep_alive - , h_connection_close - , h_connection_upgrade - }; - -enum http_host_state - { - s_http_host_dead = 1 - , s_http_userinfo_start - , s_http_userinfo - , s_http_host_start - , s_http_host_v6_start - , s_http_host - , 
s_http_host_v6 - , s_http_host_v6_end - , s_http_host_port_start - , s_http_host_port -}; - -/* Macros for character classes; depends on strict-mode */ -#define CR '\r' -#define LF '\n' -#define LOWER(c) (unsigned char)(c | 0x20) -#define IS_ALPHA(c) (LOWER(c) >= 'a' && LOWER(c) <= 'z') -#define IS_NUM(c) ((c) >= '0' && (c) <= '9') -#define IS_ALPHANUM(c) (IS_ALPHA(c) || IS_NUM(c)) -#define IS_HEX(c) (IS_NUM(c) || (LOWER(c) >= 'a' && LOWER(c) <= 'f')) -#define IS_MARK(c) ((c) == '-' || (c) == '_' || (c) == '.' || \ - (c) == '!' || (c) == '~' || (c) == '*' || (c) == '\'' || (c) == '(' || \ - (c) == ')') -#define IS_USERINFO_CHAR(c) (IS_ALPHANUM(c) || IS_MARK(c) || (c) == '%' || \ - (c) == ';' || (c) == ':' || (c) == '&' || (c) == '=' || (c) == '+' || \ - (c) == '$' || (c) == ',') - -#define STRICT_TOKEN(c) (tokens[(unsigned char)c]) - -#if HTTP_PARSER_STRICT -#define TOKEN(c) (tokens[(unsigned char)c]) -#define IS_URL_CHAR(c) (BIT_AT(normal_url_char, (unsigned char)c)) -#define IS_HOST_CHAR(c) (IS_ALPHANUM(c) || (c) == '.' || (c) == '-') -#else -#define TOKEN(c) ((c == ' ') ? ' ' : tokens[(unsigned char)c]) -#define IS_URL_CHAR(c) \ - (BIT_AT(normal_url_char, (unsigned char)c) || ((c) & 0x80)) -#define IS_HOST_CHAR(c) \ - (IS_ALPHANUM(c) || (c) == '.' || (c) == '-' || (c) == '_') -#endif - - -#define start_state (parser->type == HTTP_REQUEST ? s_start_req : s_start_res) - - -#if HTTP_PARSER_STRICT -# define STRICT_CHECK(cond) \ -do { \ - if (cond) { \ - SET_ERRNO(HPE_STRICT); \ - goto error; \ - } \ -} while (0) -# define NEW_MESSAGE() (http_should_keep_alive(parser) ? 
start_state : s_dead) -#else -# define STRICT_CHECK(cond) -# define NEW_MESSAGE() start_state -#endif - - -/* Map errno values to strings for human-readable output */ -#define HTTP_STRERROR_GEN(n, s) { "HPE_" #n, s }, -static struct { - const char *name; - const char *description; -} http_strerror_tab[] = { - HTTP_ERRNO_MAP(HTTP_STRERROR_GEN) -}; -#undef HTTP_STRERROR_GEN - -int http_message_needs_eof(const http_parser *parser); - -/* Our URL parser. - * - * This is designed to be shared by http_parser_execute() for URL validation, - * hence it has a state transition + byte-for-byte interface. In addition, it - * is meant to be embedded in http_parser_parse_url(), which does the dirty - * work of turning state transitions URL components for its API. - * - * This function should only be invoked with non-space characters. It is - * assumed that the caller cares about (and can detect) the transition between - * URL and non-URL states by looking for these. - */ -static enum state -parse_url_char(enum state s, const char ch) -{ - if (ch == ' ' || ch == '\r' || ch == '\n') { - return s_dead; - } - -#if HTTP_PARSER_STRICT - if (ch == '\t' || ch == '\f') { - return s_dead; - } -#endif - - switch (s) { - case s_req_spaces_before_url: - /* Proxied requests are followed by scheme of an absolute URI (alpha). - * All methods except CONNECT are followed by '/' or '*'. 
- */ - - if (ch == '/' || ch == '*') { - return s_req_path; - } - - if (IS_ALPHA(ch)) { - return s_req_schema; - } - - break; - - case s_req_schema: - if (IS_ALPHA(ch)) { - return s; - } - - if (ch == ':') { - return s_req_schema_slash; - } - - break; - - case s_req_schema_slash: - if (ch == '/') { - return s_req_schema_slash_slash; - } - - break; - - case s_req_schema_slash_slash: - if (ch == '/') { - return s_req_server_start; - } - - break; - - case s_req_server_with_at: - if (ch == '@') { - return s_dead; - } - - /* FALLTHROUGH */ - case s_req_server_start: - case s_req_server: - if (ch == '/') { - return s_req_path; - } - - if (ch == '?') { - return s_req_query_string_start; - } - - if (ch == '@') { - return s_req_server_with_at; - } - - if (IS_USERINFO_CHAR(ch) || ch == '[' || ch == ']') { - return s_req_server; - } - - break; - - case s_req_path: - if (IS_URL_CHAR(ch)) { - return s; - } - - switch (ch) { - case '?': - return s_req_query_string_start; - - case '#': - return s_req_fragment_start; - } - - break; - - case s_req_query_string_start: - case s_req_query_string: - if (IS_URL_CHAR(ch)) { - return s_req_query_string; - } - - switch (ch) { - case '?': - /* allow extra '?' 
in query string */ - return s_req_query_string; - - case '#': - return s_req_fragment_start; - } - - break; - - case s_req_fragment_start: - if (IS_URL_CHAR(ch)) { - return s_req_fragment; - } - - switch (ch) { - case '?': - return s_req_fragment; - - case '#': - return s; - } - - break; - - case s_req_fragment: - if (IS_URL_CHAR(ch)) { - return s; - } - - switch (ch) { - case '?': - case '#': - return s; - } - - break; - - default: - break; - } - - /* We should never fall out of the switch above unless there's an error */ - return s_dead; -} - -size_t http_parser_execute (http_parser *parser, - const http_parser_settings *settings, - const char *data, - size_t len) -{ - char c, ch; - int8_t unhex_val; - const char *p = data; - const char *header_field_mark = 0; - const char *header_value_mark = 0; - const char *url_mark = 0; - const char *body_mark = 0; - const char *status_mark = 0; - enum state p_state = (enum state) parser->state; - - /* We're in an error state. Don't bother doing anything. */ - if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { - return 0; - } - - if (len == 0) { - switch (CURRENT_STATE()) { - case s_body_identity_eof: - /* Use of CALLBACK_NOTIFY() here would erroneously return 1 byte read if - * we got paused. 
- */ - CALLBACK_NOTIFY_NOADVANCE(message_complete); - return 0; - - case s_dead: - case s_start_req_or_res: - case s_start_res: - case s_start_req: - return 0; - - default: - SET_ERRNO(HPE_INVALID_EOF_STATE); - return 1; - } - } - - - if (CURRENT_STATE() == s_header_field) - header_field_mark = data; - if (CURRENT_STATE() == s_header_value) - header_value_mark = data; - switch (CURRENT_STATE()) { - case s_req_path: - case s_req_schema: - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - case s_req_server: - case s_req_server_with_at: - case s_req_query_string_start: - case s_req_query_string: - case s_req_fragment_start: - case s_req_fragment: - url_mark = data; - break; - case s_res_status: - status_mark = data; - break; - default: - break; - } - - for (p=data; p != data + len; p++) { - ch = *p; - - if (PARSING_HEADER(CURRENT_STATE())) - COUNT_HEADER_SIZE(1); - -reexecute: - switch (CURRENT_STATE()) { - - case s_dead: - /* this state is used after a 'Connection: close' message - * the parser will error out if it reads another message - */ - if (LIKELY(ch == CR || ch == LF)) - break; - - SET_ERRNO(HPE_CLOSED_CONNECTION); - goto error; - - case s_start_req_or_res: - { - if (ch == CR || ch == LF) - break; - parser->flags = 0; - parser->content_length = ULLONG_MAX; - - if (ch == 'H') { - UPDATE_STATE(s_res_or_resp_H); - - CALLBACK_NOTIFY(message_begin); - } else { - parser->type = HTTP_REQUEST; - UPDATE_STATE(s_start_req); - REEXECUTE(); - } - - break; - } - - case s_res_or_resp_H: - if (ch == 'T') { - parser->type = HTTP_RESPONSE; - UPDATE_STATE(s_res_HT); - } else { - if (UNLIKELY(ch != 'E')) { - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - - parser->type = HTTP_REQUEST; - parser->method = HTTP_HEAD; - parser->index = 2; - UPDATE_STATE(s_req_method); - } - break; - - case s_start_res: - { - parser->flags = 0; - parser->content_length = ULLONG_MAX; - - switch (ch) { - case 'H': - UPDATE_STATE(s_res_H); - break; - - case 
CR: - case LF: - break; - - default: - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - - CALLBACK_NOTIFY(message_begin); - break; - } - - case s_res_H: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_res_HT); - break; - - case s_res_HT: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_res_HTT); - break; - - case s_res_HTT: - STRICT_CHECK(ch != 'P'); - UPDATE_STATE(s_res_HTTP); - break; - - case s_res_HTTP: - STRICT_CHECK(ch != '/'); - UPDATE_STATE(s_res_first_http_major); - break; - - case s_res_first_http_major: - if (UNLIKELY(ch < '0' || ch > '9')) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major = ch - '0'; - UPDATE_STATE(s_res_http_major); - break; - - /* major HTTP version or dot */ - case s_res_http_major: - { - if (ch == '.') { - UPDATE_STATE(s_res_first_http_minor); - break; - } - - if (!IS_NUM(ch)) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major *= 10; - parser->http_major += ch - '0'; - - if (UNLIKELY(parser->http_major > 999)) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - break; - } - - /* first digit of minor HTTP version */ - case s_res_first_http_minor: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor = ch - '0'; - UPDATE_STATE(s_res_http_minor); - break; - - /* minor HTTP version or end of request line */ - case s_res_http_minor: - { - if (ch == ' ') { - UPDATE_STATE(s_res_first_status_code); - break; - } - - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor *= 10; - parser->http_minor += ch - '0'; - - if (UNLIKELY(parser->http_minor > 999)) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - break; - } - - case s_res_first_status_code: - { - if (!IS_NUM(ch)) { - if (ch == ' ') { - break; - } - - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - parser->status_code = ch - '0'; - UPDATE_STATE(s_res_status_code); - break; - } - - case s_res_status_code: - { - if 
(!IS_NUM(ch)) { - switch (ch) { - case ' ': - UPDATE_STATE(s_res_status_start); - break; - case CR: - UPDATE_STATE(s_res_line_almost_done); - break; - case LF: - UPDATE_STATE(s_header_field_start); - break; - default: - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - break; - } - - parser->status_code *= 10; - parser->status_code += ch - '0'; - - if (UNLIKELY(parser->status_code > 999)) { - SET_ERRNO(HPE_INVALID_STATUS); - goto error; - } - - break; - } - - case s_res_status_start: - { - if (ch == CR) { - UPDATE_STATE(s_res_line_almost_done); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_field_start); - break; - } - - MARK(status); - UPDATE_STATE(s_res_status); - parser->index = 0; - break; - } - - case s_res_status: - if (ch == CR) { - UPDATE_STATE(s_res_line_almost_done); - CALLBACK_DATA(status); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_field_start); - CALLBACK_DATA(status); - break; - } - - break; - - case s_res_line_almost_done: - STRICT_CHECK(ch != LF); - UPDATE_STATE(s_header_field_start); - break; - - case s_start_req: - { - if (ch == CR || ch == LF) - break; - parser->flags = 0; - parser->content_length = ULLONG_MAX; - - if (UNLIKELY(!IS_ALPHA(ch))) { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - parser->method = (enum http_method) 0; - parser->index = 1; - switch (ch) { - case 'C': parser->method = HTTP_CONNECT; /* or COPY, CHECKOUT */ break; - case 'D': parser->method = HTTP_DELETE; break; - case 'G': parser->method = HTTP_GET; break; - case 'H': parser->method = HTTP_HEAD; break; - case 'L': parser->method = HTTP_LOCK; break; - case 'M': parser->method = HTTP_MKCOL; /* or MOVE, MKACTIVITY, MERGE, M-SEARCH, MKCALENDAR */ break; - case 'N': parser->method = HTTP_NOTIFY; break; - case 'O': parser->method = HTTP_OPTIONS; break; - case 'P': parser->method = HTTP_POST; - /* or PROPFIND|PROPPATCH|PUT|PATCH|PURGE */ - break; - case 'R': parser->method = HTTP_REPORT; break; - case 'S': parser->method = HTTP_SUBSCRIBE; /* or 
SEARCH */ break; - case 'T': parser->method = HTTP_TRACE; break; - case 'U': parser->method = HTTP_UNLOCK; /* or UNSUBSCRIBE */ break; - default: - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - UPDATE_STATE(s_req_method); - - CALLBACK_NOTIFY(message_begin); - - break; - } - - case s_req_method: - { - const char *matcher; - if (UNLIKELY(ch == '\0')) { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - matcher = method_strings[parser->method]; - if (ch == ' ' && matcher[parser->index] == '\0') { - UPDATE_STATE(s_req_spaces_before_url); - } else if (ch == matcher[parser->index]) { - ; /* nada */ - } else if (parser->method == HTTP_CONNECT) { - if (parser->index == 1 && ch == 'H') { - parser->method = HTTP_CHECKOUT; - } else if (parser->index == 2 && ch == 'P') { - parser->method = HTTP_COPY; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->method == HTTP_MKCOL) { - if (parser->index == 1 && ch == 'O') { - parser->method = HTTP_MOVE; - } else if (parser->index == 1 && ch == 'E') { - parser->method = HTTP_MERGE; - } else if (parser->index == 1 && ch == '-') { - parser->method = HTTP_MSEARCH; - } else if (parser->index == 2 && ch == 'A') { - parser->method = HTTP_MKACTIVITY; - } else if (parser->index == 3 && ch == 'A') { - parser->method = HTTP_MKCALENDAR; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->method == HTTP_SUBSCRIBE) { - if (parser->index == 1 && ch == 'E') { - parser->method = HTTP_SEARCH; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->index == 1 && parser->method == HTTP_POST) { - if (ch == 'R') { - parser->method = HTTP_PROPFIND; /* or HTTP_PROPPATCH */ - } else if (ch == 'U') { - parser->method = HTTP_PUT; /* or HTTP_PURGE */ - } else if (ch == 'A') { - parser->method = HTTP_PATCH; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->index == 2) { - if (parser->method == HTTP_PUT) { - if (ch == 'R') { - 
parser->method = HTTP_PURGE; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->method == HTTP_UNLOCK) { - if (ch == 'S') { - parser->method = HTTP_UNSUBSCRIBE; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - } else if (parser->index == 4 && parser->method == HTTP_PROPFIND && ch == 'P') { - parser->method = HTTP_PROPPATCH; - } else { - SET_ERRNO(HPE_INVALID_METHOD); - goto error; - } - - ++parser->index; - break; - } - - case s_req_spaces_before_url: - { - if (ch == ' ') break; - - MARK(url); - if (parser->method == HTTP_CONNECT) { - UPDATE_STATE(s_req_server_start); - } - - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - - break; - } - - case s_req_schema: - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - { - switch (ch) { - /* No whitespace allowed here */ - case ' ': - case CR: - case LF: - SET_ERRNO(HPE_INVALID_URL); - goto error; - default: - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - } - - break; - } - - case s_req_server: - case s_req_server_with_at: - case s_req_path: - case s_req_query_string_start: - case s_req_query_string: - case s_req_fragment_start: - case s_req_fragment: - { - switch (ch) { - case ' ': - UPDATE_STATE(s_req_http_start); - CALLBACK_DATA(url); - break; - case CR: - case LF: - parser->http_major = 0; - parser->http_minor = 9; - UPDATE_STATE((ch == CR) ? 
- s_req_line_almost_done : - s_header_field_start); - CALLBACK_DATA(url); - break; - default: - UPDATE_STATE(parse_url_char(CURRENT_STATE(), ch)); - if (UNLIKELY(CURRENT_STATE() == s_dead)) { - SET_ERRNO(HPE_INVALID_URL); - goto error; - } - } - break; - } - - case s_req_http_start: - switch (ch) { - case 'H': - UPDATE_STATE(s_req_http_H); - break; - case ' ': - break; - default: - SET_ERRNO(HPE_INVALID_CONSTANT); - goto error; - } - break; - - case s_req_http_H: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_req_http_HT); - break; - - case s_req_http_HT: - STRICT_CHECK(ch != 'T'); - UPDATE_STATE(s_req_http_HTT); - break; - - case s_req_http_HTT: - STRICT_CHECK(ch != 'P'); - UPDATE_STATE(s_req_http_HTTP); - break; - - case s_req_http_HTTP: - STRICT_CHECK(ch != '/'); - UPDATE_STATE(s_req_first_http_major); - break; - - /* first digit of major HTTP version */ - case s_req_first_http_major: - if (UNLIKELY(ch < '1' || ch > '9')) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major = ch - '0'; - UPDATE_STATE(s_req_http_major); - break; - - /* major HTTP version or dot */ - case s_req_http_major: - { - if (ch == '.') { - UPDATE_STATE(s_req_first_http_minor); - break; - } - - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_major *= 10; - parser->http_major += ch - '0'; - - if (UNLIKELY(parser->http_major > 999)) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - break; - } - - /* first digit of minor HTTP version */ - case s_req_first_http_minor: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor = ch - '0'; - UPDATE_STATE(s_req_http_minor); - break; - - /* minor HTTP version or end of request line */ - case s_req_http_minor: - { - if (ch == CR) { - UPDATE_STATE(s_req_line_almost_done); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_field_start); - break; - } - - /* XXX allow spaces after digit? 
*/ - - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - parser->http_minor *= 10; - parser->http_minor += ch - '0'; - - if (UNLIKELY(parser->http_minor > 999)) { - SET_ERRNO(HPE_INVALID_VERSION); - goto error; - } - - break; - } - - /* end of request line */ - case s_req_line_almost_done: - { - if (UNLIKELY(ch != LF)) { - SET_ERRNO(HPE_LF_EXPECTED); - goto error; - } - - UPDATE_STATE(s_header_field_start); - break; - } - - case s_header_field_start: - { - if (ch == CR) { - UPDATE_STATE(s_headers_almost_done); - break; - } - - if (ch == LF) { - /* they might be just sending \n instead of \r\n so this would be - * the second \n to denote the end of headers*/ - UPDATE_STATE(s_headers_almost_done); - REEXECUTE(); - } - - c = TOKEN(ch); - - if (UNLIKELY(!c)) { - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - - MARK(header_field); - - parser->index = 0; - UPDATE_STATE(s_header_field); - - switch (c) { - case 'c': - parser->header_state = h_C; - break; - - case 'p': - parser->header_state = h_matching_proxy_connection; - break; - - case 't': - parser->header_state = h_matching_transfer_encoding; - break; - - case 'u': - parser->header_state = h_matching_upgrade; - break; - - default: - parser->header_state = h_general; - break; - } - break; - } - - case s_header_field: - { - const char* start = p; - for (; p != data + len; p++) { - ch = *p; - c = TOKEN(ch); - - if (!c) - break; - - switch (parser->header_state) { - case h_general: - break; - - case h_C: - parser->index++; - parser->header_state = (c == 'o' ? h_CO : h_general); - break; - - case h_CO: - parser->index++; - parser->header_state = (c == 'n' ? 
h_CON : h_general); - break; - - case h_CON: - parser->index++; - switch (c) { - case 'n': - parser->header_state = h_matching_connection; - break; - case 't': - parser->header_state = h_matching_content_length; - break; - default: - parser->header_state = h_general; - break; - } - break; - - /* connection */ - - case h_matching_connection: - parser->index++; - if (parser->index > sizeof(CONNECTION)-1 - || c != CONNECTION[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(CONNECTION)-2) { - parser->header_state = h_connection; - } - break; - - /* proxy-connection */ - - case h_matching_proxy_connection: - parser->index++; - if (parser->index > sizeof(PROXY_CONNECTION)-1 - || c != PROXY_CONNECTION[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(PROXY_CONNECTION)-2) { - parser->header_state = h_connection; - } - break; - - /* content-length */ - - case h_matching_content_length: - parser->index++; - if (parser->index > sizeof(CONTENT_LENGTH)-1 - || c != CONTENT_LENGTH[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(CONTENT_LENGTH)-2) { - parser->header_state = h_content_length; - } - break; - - /* transfer-encoding */ - - case h_matching_transfer_encoding: - parser->index++; - if (parser->index > sizeof(TRANSFER_ENCODING)-1 - || c != TRANSFER_ENCODING[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(TRANSFER_ENCODING)-2) { - parser->header_state = h_transfer_encoding; - } - break; - - /* upgrade */ - - case h_matching_upgrade: - parser->index++; - if (parser->index > sizeof(UPGRADE)-1 - || c != UPGRADE[parser->index]) { - parser->header_state = h_general; - } else if (parser->index == sizeof(UPGRADE)-2) { - parser->header_state = h_upgrade; - } - break; - - case h_connection: - case h_content_length: - case h_transfer_encoding: - case h_upgrade: - if (ch != ' ') parser->header_state = h_general; - 
break; - - default: - assert(0 && "Unknown header_state"); - break; - } - } - - COUNT_HEADER_SIZE(p - start); - - if (p == data + len) { - --p; - break; - } - - if (ch == ':') { - UPDATE_STATE(s_header_value_discard_ws); - CALLBACK_DATA(header_field); - break; - } - - SET_ERRNO(HPE_INVALID_HEADER_TOKEN); - goto error; - } - - case s_header_value_discard_ws: - if (ch == ' ' || ch == '\t') break; - - if (ch == CR) { - UPDATE_STATE(s_header_value_discard_ws_almost_done); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_value_discard_lws); - break; - } - - /* FALLTHROUGH */ - - case s_header_value_start: - { - MARK(header_value); - - UPDATE_STATE(s_header_value); - parser->index = 0; - - c = LOWER(ch); - - switch (parser->header_state) { - case h_upgrade: - parser->flags |= F_UPGRADE; - parser->header_state = h_general; - break; - - case h_transfer_encoding: - /* looking for 'Transfer-Encoding: chunked' */ - if ('c' == c) { - parser->header_state = h_matching_transfer_encoding_chunked; - } else { - parser->header_state = h_general; - } - break; - - case h_content_length: - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - goto error; - } - - parser->content_length = ch - '0'; - break; - - case h_connection: - /* looking for 'Connection: keep-alive' */ - if (c == 'k') { - parser->header_state = h_matching_connection_keep_alive; - /* looking for 'Connection: close' */ - } else if (c == 'c') { - parser->header_state = h_matching_connection_close; - } else if (c == 'u') { - parser->header_state = h_matching_connection_upgrade; - } else { - parser->header_state = h_matching_connection_token; - } - break; - - /* Multi-value `Connection` header */ - case h_matching_connection_token_start: - break; - - default: - parser->header_state = h_general; - break; - } - break; - } - - case s_header_value: - { - const char* start = p; - enum header_states h_state = (enum header_states) parser->header_state; - for (; p != data + len; p++) { - ch = *p; - if 
(ch == CR) { - UPDATE_STATE(s_header_almost_done); - parser->header_state = h_state; - CALLBACK_DATA(header_value); - break; - } - - if (ch == LF) { - UPDATE_STATE(s_header_almost_done); - COUNT_HEADER_SIZE(p - start); - parser->header_state = h_state; - CALLBACK_DATA_NOADVANCE(header_value); - REEXECUTE(); - } - - c = LOWER(ch); - - switch (h_state) { - case h_general: - { - const char* p_cr; - const char* p_lf; - size_t limit = data + len - p; - - limit = MIN(limit, HTTP_MAX_HEADER_SIZE); - - p_cr = (const char*) memchr(p, CR, limit); - p_lf = (const char*) memchr(p, LF, limit); - if (p_cr != NULL) { - if (p_lf != NULL && p_cr >= p_lf) - p = p_lf; - else - p = p_cr; - } else if (UNLIKELY(p_lf != NULL)) { - p = p_lf; - } else { - p = data + len; - } - --p; - - break; - } - - case h_connection: - case h_transfer_encoding: - assert(0 && "Shouldn't get here."); - break; - - case h_content_length: - { - uint64_t t; - - if (ch == ' ') break; - - if (UNLIKELY(!IS_NUM(ch))) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - parser->header_state = h_state; - goto error; - } - - t = parser->content_length; - t *= 10; - t += ch - '0'; - - /* Overflow? Test against a conservative limit for simplicity. 
*/ - if (UNLIKELY((ULLONG_MAX - 10) / 10 < parser->content_length)) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - parser->header_state = h_state; - goto error; - } - - parser->content_length = t; - break; - } - - /* Transfer-Encoding: chunked */ - case h_matching_transfer_encoding_chunked: - parser->index++; - if (parser->index > sizeof(CHUNKED)-1 - || c != CHUNKED[parser->index]) { - h_state = h_general; - } else if (parser->index == sizeof(CHUNKED)-2) { - h_state = h_transfer_encoding_chunked; - } - break; - - case h_matching_connection_token_start: - /* looking for 'Connection: keep-alive' */ - if (c == 'k') { - h_state = h_matching_connection_keep_alive; - /* looking for 'Connection: close' */ - } else if (c == 'c') { - h_state = h_matching_connection_close; - } else if (c == 'u') { - h_state = h_matching_connection_upgrade; - } else if (STRICT_TOKEN(c)) { - h_state = h_matching_connection_token; - } else if (c == ' ' || c == '\t') { - /* Skip lws */ - } else { - h_state = h_general; - } - break; - - /* looking for 'Connection: keep-alive' */ - case h_matching_connection_keep_alive: - parser->index++; - if (parser->index > sizeof(KEEP_ALIVE)-1 - || c != KEEP_ALIVE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(KEEP_ALIVE)-2) { - h_state = h_connection_keep_alive; - } - break; - - /* looking for 'Connection: close' */ - case h_matching_connection_close: - parser->index++; - if (parser->index > sizeof(CLOSE)-1 || c != CLOSE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(CLOSE)-2) { - h_state = h_connection_close; - } - break; - - /* looking for 'Connection: upgrade' */ - case h_matching_connection_upgrade: - parser->index++; - if (parser->index > sizeof(UPGRADE) - 1 || - c != UPGRADE[parser->index]) { - h_state = h_matching_connection_token; - } else if (parser->index == sizeof(UPGRADE)-2) { - h_state = h_connection_upgrade; - } - break; - - case 
h_matching_connection_token: - if (ch == ',') { - h_state = h_matching_connection_token_start; - parser->index = 0; - } - break; - - case h_transfer_encoding_chunked: - if (ch != ' ') h_state = h_general; - break; - - case h_connection_keep_alive: - case h_connection_close: - case h_connection_upgrade: - if (ch == ',') { - if (h_state == h_connection_keep_alive) { - parser->flags |= F_CONNECTION_KEEP_ALIVE; - } else if (h_state == h_connection_close) { - parser->flags |= F_CONNECTION_CLOSE; - } else if (h_state == h_connection_upgrade) { - parser->flags |= F_CONNECTION_UPGRADE; - } - h_state = h_matching_connection_token_start; - parser->index = 0; - } else if (ch != ' ') { - h_state = h_matching_connection_token; - } - break; - - default: - UPDATE_STATE(s_header_value); - h_state = h_general; - break; - } - } - parser->header_state = h_state; - - COUNT_HEADER_SIZE(p - start); - - if (p == data + len) - --p; - break; - } - - case s_header_almost_done: - { - STRICT_CHECK(ch != LF); - - UPDATE_STATE(s_header_value_lws); - break; - } - - case s_header_value_lws: - { - if (ch == ' ' || ch == '\t') { - UPDATE_STATE(s_header_value_start); - REEXECUTE(); - } - - /* finished the header */ - switch (parser->header_state) { - case h_connection_keep_alive: - parser->flags |= F_CONNECTION_KEEP_ALIVE; - break; - case h_connection_close: - parser->flags |= F_CONNECTION_CLOSE; - break; - case h_transfer_encoding_chunked: - parser->flags |= F_CHUNKED; - break; - case h_connection_upgrade: - parser->flags |= F_CONNECTION_UPGRADE; - break; - default: - break; - } - - UPDATE_STATE(s_header_field_start); - REEXECUTE(); - } - - case s_header_value_discard_ws_almost_done: - { - STRICT_CHECK(ch != LF); - UPDATE_STATE(s_header_value_discard_lws); - break; - } - - case s_header_value_discard_lws: - { - if (ch == ' ' || ch == '\t') { - UPDATE_STATE(s_header_value_discard_ws); - break; - } else { - switch (parser->header_state) { - case h_connection_keep_alive: - parser->flags |= 
F_CONNECTION_KEEP_ALIVE; - break; - case h_connection_close: - parser->flags |= F_CONNECTION_CLOSE; - break; - case h_connection_upgrade: - parser->flags |= F_CONNECTION_UPGRADE; - break; - case h_transfer_encoding_chunked: - parser->flags |= F_CHUNKED; - break; - default: - break; - } - - /* header value was empty */ - MARK(header_value); - UPDATE_STATE(s_header_field_start); - CALLBACK_DATA_NOADVANCE(header_value); - REEXECUTE(); - } - } - - case s_headers_almost_done: - { - STRICT_CHECK(ch != LF); - - if (parser->flags & F_TRAILING) { - /* End of a chunked request */ - UPDATE_STATE(s_message_done); - CALLBACK_NOTIFY_NOADVANCE(chunk_complete); - REEXECUTE(); - } - - UPDATE_STATE(s_headers_done); - - /* Set this here so that on_headers_complete() callbacks can see it */ - parser->upgrade = - ((parser->flags & (F_UPGRADE | F_CONNECTION_UPGRADE)) == - (F_UPGRADE | F_CONNECTION_UPGRADE) || - parser->method == HTTP_CONNECT); - - /* Here we call the headers_complete callback. This is somewhat - * different than other callbacks because if the user returns 1, we - * will interpret that as saying that this message has no body. This - * is needed for the annoying case of recieving a response to a HEAD - * request. - * - * We'd like to use CALLBACK_NOTIFY_NOADVANCE() here but we cannot, so - * we have to simulate it by handling a change in errno below. 
- */ - if (settings->on_headers_complete) { - switch (settings->on_headers_complete(parser)) { - case 0: - break; - - case 1: - parser->flags |= F_SKIPBODY; - break; - - default: - SET_ERRNO(HPE_CB_headers_complete); - RETURN(p - data); /* Error */ - } - } - - if (HTTP_PARSER_ERRNO(parser) != HPE_OK) { - RETURN(p - data); - } - - REEXECUTE(); - } - - case s_headers_done: - { - STRICT_CHECK(ch != LF); - - parser->nread = 0; - - int hasBody = parser->flags & F_CHUNKED || - (parser->content_length > 0 && parser->content_length != ULLONG_MAX); - if (parser->upgrade && (parser->method == HTTP_CONNECT || - (parser->flags & F_SKIPBODY) || !hasBody)) { - /* Exit, the rest of the message is in a different protocol. */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - RETURN((p - data) + 1); - } - - if (parser->flags & F_SKIPBODY) { - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else if (parser->flags & F_CHUNKED) { - /* chunked encoding - ignore Content-Length header */ - UPDATE_STATE(s_chunk_size_start); - } else { - if (parser->content_length == 0) { - /* Content-Length header given but zero: Content-Length: 0\r\n */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else if (parser->content_length != ULLONG_MAX) { - /* Content-Length header given and non-zero */ - UPDATE_STATE(s_body_identity); - } else { - if (parser->type == HTTP_REQUEST || - !http_message_needs_eof(parser)) { - /* Assume content-length 0 - read the next */ - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - } else { - /* Read body until EOF */ - UPDATE_STATE(s_body_identity_eof); - } - } - } - - break; - } - - case s_body_identity: - { - uint64_t to_read = MIN(parser->content_length, - (uint64_t) ((data + len) - p)); - - assert(parser->content_length != 0 - && parser->content_length != ULLONG_MAX); - - /* The difference between advancing content_length and p is because - * the latter will automaticaly advance on the 
next loop iteration. - * Further, if content_length ends up at 0, we want to see the last - * byte again for our message complete callback. - */ - MARK(body); - parser->content_length -= to_read; - p += to_read - 1; - - if (parser->content_length == 0) { - UPDATE_STATE(s_message_done); - - /* Mimic CALLBACK_DATA_NOADVANCE() but with one extra byte. - * - * The alternative to doing this is to wait for the next byte to - * trigger the data callback, just as in every other case. The - * problem with this is that this makes it difficult for the test - * harness to distinguish between complete-on-EOF and - * complete-on-length. It's not clear that this distinction is - * important for applications, but let's keep it for now. - */ - CALLBACK_DATA_(body, p - body_mark + 1, p - data); - REEXECUTE(); - } - - break; - } - - /* read until EOF */ - case s_body_identity_eof: - MARK(body); - p = data + len - 1; - - break; - - case s_message_done: - UPDATE_STATE(NEW_MESSAGE()); - CALLBACK_NOTIFY(message_complete); - if (parser->upgrade) { - /* Exit, the rest of the message is in a different protocol. */ - RETURN((p - data) + 1); - } - break; - - case s_chunk_size_start: - { - assert(parser->nread == 1); - assert(parser->flags & F_CHUNKED); - - unhex_val = unhex[(unsigned char)ch]; - if (UNLIKELY(unhex_val == -1)) { - SET_ERRNO(HPE_INVALID_CHUNK_SIZE); - goto error; - } - - parser->content_length = unhex_val; - UPDATE_STATE(s_chunk_size); - break; - } - - case s_chunk_size: - { - uint64_t t; - - assert(parser->flags & F_CHUNKED); - - if (ch == CR) { - UPDATE_STATE(s_chunk_size_almost_done); - break; - } - - unhex_val = unhex[(unsigned char)ch]; - - if (unhex_val == -1) { - if (ch == ';' || ch == ' ') { - UPDATE_STATE(s_chunk_parameters); - break; - } - - SET_ERRNO(HPE_INVALID_CHUNK_SIZE); - goto error; - } - - t = parser->content_length; - t *= 16; - t += unhex_val; - - /* Overflow? Test against a conservative limit for simplicity. 
*/ - if (UNLIKELY((ULLONG_MAX - 16) / 16 < parser->content_length)) { - SET_ERRNO(HPE_INVALID_CONTENT_LENGTH); - goto error; - } - - parser->content_length = t; - break; - } - - case s_chunk_parameters: - { - assert(parser->flags & F_CHUNKED); - /* just ignore this shit. TODO check for overflow */ - if (ch == CR) { - UPDATE_STATE(s_chunk_size_almost_done); - break; - } - break; - } - - case s_chunk_size_almost_done: - { - assert(parser->flags & F_CHUNKED); - STRICT_CHECK(ch != LF); - - parser->nread = 0; - - if (parser->content_length == 0) { - parser->flags |= F_TRAILING; - UPDATE_STATE(s_header_field_start); - } else { - UPDATE_STATE(s_chunk_data); - } - CALLBACK_NOTIFY(chunk_header); - break; - } - - case s_chunk_data: - { - uint64_t to_read = MIN(parser->content_length, - (uint64_t) ((data + len) - p)); - - assert(parser->flags & F_CHUNKED); - assert(parser->content_length != 0 - && parser->content_length != ULLONG_MAX); - - /* See the explanation in s_body_identity for why the content - * length and data pointers are managed this way. - */ - MARK(body); - parser->content_length -= to_read; - p += to_read - 1; - - if (parser->content_length == 0) { - UPDATE_STATE(s_chunk_data_almost_done); - } - - break; - } - - case s_chunk_data_almost_done: - assert(parser->flags & F_CHUNKED); - assert(parser->content_length == 0); - STRICT_CHECK(ch != CR); - UPDATE_STATE(s_chunk_data_done); - CALLBACK_DATA(body); - break; - - case s_chunk_data_done: - assert(parser->flags & F_CHUNKED); - STRICT_CHECK(ch != LF); - parser->nread = 0; - UPDATE_STATE(s_chunk_size_start); - CALLBACK_NOTIFY(chunk_complete); - break; - - default: - assert(0 && "unhandled state"); - SET_ERRNO(HPE_INVALID_INTERNAL_STATE); - goto error; - } - } - - /* Run callbacks for any marks that we have leftover after we ran our of - * bytes. There should be at most one of these set, so it's OK to invoke - * them in series (unset marks will not result in callbacks). 
- * - * We use the NOADVANCE() variety of callbacks here because 'p' has already - * overflowed 'data' and this allows us to correct for the off-by-one that - * we'd otherwise have (since CALLBACK_DATA() is meant to be run with a 'p' - * value that's in-bounds). - */ - - assert(((header_field_mark ? 1 : 0) + - (header_value_mark ? 1 : 0) + - (url_mark ? 1 : 0) + - (body_mark ? 1 : 0) + - (status_mark ? 1 : 0)) <= 1); - - CALLBACK_DATA_NOADVANCE(header_field); - CALLBACK_DATA_NOADVANCE(header_value); - CALLBACK_DATA_NOADVANCE(url); - CALLBACK_DATA_NOADVANCE(body); - CALLBACK_DATA_NOADVANCE(status); - - RETURN(len); - -error: - if (HTTP_PARSER_ERRNO(parser) == HPE_OK) { - SET_ERRNO(HPE_UNKNOWN); - } - - RETURN(p - data); -} - - -/* Does the parser need to see an EOF to find the end of the message? */ -int -http_message_needs_eof (const http_parser *parser) -{ - if (parser->type == HTTP_REQUEST) { - return 0; - } - - /* See RFC 2616 section 4.4 */ - if (parser->status_code / 100 == 1 || /* 1xx e.g. 
Continue */ - parser->status_code == 204 || /* No Content */ - parser->status_code == 304 || /* Not Modified */ - parser->flags & F_SKIPBODY) { /* response to a HEAD request */ - return 0; - } - - if ((parser->flags & F_CHUNKED) || parser->content_length != ULLONG_MAX) { - return 0; - } - - return 1; -} - - -int -http_should_keep_alive (const http_parser *parser) -{ - if (parser->http_major > 0 && parser->http_minor > 0) { - /* HTTP/1.1 */ - if (parser->flags & F_CONNECTION_CLOSE) { - return 0; - } - } else { - /* HTTP/1.0 or earlier */ - if (!(parser->flags & F_CONNECTION_KEEP_ALIVE)) { - return 0; - } - } - - return !http_message_needs_eof(parser); -} - - -const char * -http_method_str (enum http_method m) -{ - return ELEM_AT(method_strings, m, ""); -} - - -void -http_parser_init (http_parser *parser, enum http_parser_type t) -{ - void *data = parser->data; /* preserve application data */ - memset(parser, 0, sizeof(*parser)); - parser->data = data; - parser->type = t; - parser->state = (t == HTTP_REQUEST ? s_start_req : (t == HTTP_RESPONSE ? 
s_start_res : s_start_req_or_res)); - parser->http_errno = HPE_OK; -} - -void -http_parser_settings_init(http_parser_settings *settings) -{ - memset(settings, 0, sizeof(*settings)); -} - -const char * -http_errno_name(enum http_errno err) { - assert(((size_t) err) < - (sizeof(http_strerror_tab) / sizeof(http_strerror_tab[0]))); - return http_strerror_tab[err].name; -} - -const char * -http_errno_description(enum http_errno err) { - assert(((size_t) err) < - (sizeof(http_strerror_tab) / sizeof(http_strerror_tab[0]))); - return http_strerror_tab[err].description; -} - -static enum http_host_state -http_parse_host_char(enum http_host_state s, const char ch) { - switch(s) { - case s_http_userinfo: - case s_http_userinfo_start: - if (ch == '@') { - return s_http_host_start; - } - - if (IS_USERINFO_CHAR(ch)) { - return s_http_userinfo; - } - break; - - case s_http_host_start: - if (ch == '[') { - return s_http_host_v6_start; - } - - if (IS_HOST_CHAR(ch)) { - return s_http_host; - } - - break; - - case s_http_host: - if (IS_HOST_CHAR(ch)) { - return s_http_host; - } - - /* FALLTHROUGH */ - case s_http_host_v6_end: - if (ch == ':') { - return s_http_host_port_start; - } - - break; - - case s_http_host_v6: - if (ch == ']') { - return s_http_host_v6_end; - } - - /* FALLTHROUGH */ - case s_http_host_v6_start: - if (IS_HEX(ch) || ch == ':' || ch == '.') { - return s_http_host_v6; - } - - break; - - case s_http_host_port: - case s_http_host_port_start: - if (IS_NUM(ch)) { - return s_http_host_port; - } - - break; - - default: - break; - } - return s_http_host_dead; -} - -static int -http_parse_host(const char * buf, struct http_parser_url *u, int found_at) { - enum http_host_state s; - - const char *p; - size_t buflen = u->field_data[UF_HOST].off + u->field_data[UF_HOST].len; - - u->field_data[UF_HOST].len = 0; - - s = found_at ? 
s_http_userinfo_start : s_http_host_start; - - for (p = buf + u->field_data[UF_HOST].off; p < buf + buflen; p++) { - enum http_host_state new_s = http_parse_host_char(s, *p); - - if (new_s == s_http_host_dead) { - return 1; - } - - switch(new_s) { - case s_http_host: - if (s != s_http_host) { - u->field_data[UF_HOST].off = p - buf; - } - u->field_data[UF_HOST].len++; - break; - - case s_http_host_v6: - if (s != s_http_host_v6) { - u->field_data[UF_HOST].off = p - buf; - } - u->field_data[UF_HOST].len++; - break; - - case s_http_host_port: - if (s != s_http_host_port) { - u->field_data[UF_PORT].off = p - buf; - u->field_data[UF_PORT].len = 0; - u->field_set |= (1 << UF_PORT); - } - u->field_data[UF_PORT].len++; - break; - - case s_http_userinfo: - if (s != s_http_userinfo) { - u->field_data[UF_USERINFO].off = p - buf ; - u->field_data[UF_USERINFO].len = 0; - u->field_set |= (1 << UF_USERINFO); - } - u->field_data[UF_USERINFO].len++; - break; - - default: - break; - } - s = new_s; - } - - /* Make sure we don't end somewhere unexpected */ - switch (s) { - case s_http_host_start: - case s_http_host_v6_start: - case s_http_host_v6: - case s_http_host_port_start: - case s_http_userinfo: - case s_http_userinfo_start: - return 1; - default: - break; - } - - return 0; -} - -int -http_parser_parse_url(const char *buf, size_t buflen, int is_connect, - struct http_parser_url *u) -{ - enum state s; - const char *p; - enum http_parser_url_fields uf, old_uf; - int found_at = 0; - - u->port = u->field_set = 0; - s = is_connect ? 
s_req_server_start : s_req_spaces_before_url; - old_uf = UF_MAX; - - for (p = buf; p < buf + buflen; p++) { - s = parse_url_char(s, *p); - - /* Figure out the next field that we're operating on */ - switch (s) { - case s_dead: - return 1; - - /* Skip delimeters */ - case s_req_schema_slash: - case s_req_schema_slash_slash: - case s_req_server_start: - case s_req_query_string_start: - case s_req_fragment_start: - continue; - - case s_req_schema: - uf = UF_SCHEMA; - break; - - case s_req_server_with_at: - found_at = 1; - - /* FALLTROUGH */ - case s_req_server: - uf = UF_HOST; - break; - - case s_req_path: - uf = UF_PATH; - break; - - case s_req_query_string: - uf = UF_QUERY; - break; - - case s_req_fragment: - uf = UF_FRAGMENT; - break; - - default: - assert(!"Unexpected state"); - return 1; - } - - /* Nothing's changed; soldier on */ - if (uf == old_uf) { - u->field_data[uf].len++; - continue; - } - - u->field_data[uf].off = p - buf; - u->field_data[uf].len = 1; - - u->field_set |= (1 << uf); - old_uf = uf; - } - - /* host must be present if there is a schema */ - /* parsing http:///toto will fail */ - if ((u->field_set & ((1 << UF_SCHEMA) | (1 << UF_HOST))) != 0) { - if (http_parse_host(buf, u, found_at) != 0) { - return 1; - } - } - - /* CONNECT requests can only contain "hostname:port" */ - if (is_connect && u->field_set != ((1 << UF_HOST)|(1 << UF_PORT))) { - return 1; - } - - if (u->field_set & (1 << UF_PORT)) { - /* Don't bother with endp; we've already validated the string */ - unsigned long v = strtoul(buf + u->field_data[UF_PORT].off, NULL, 10); - - /* Ports have a max value of 2^16 */ - if (v > 0xffff) { - return 1; - } - - u->port = (uint16_t) v; - } - - return 0; -} - -void -http_parser_pause(http_parser *parser, int paused) { - /* Users should only be pausing/unpausing a parser that is not in an error - * state. In non-debug builds, there's not much that we can do about this - * other than ignore it. 
- */ - if (HTTP_PARSER_ERRNO(parser) == HPE_OK || - HTTP_PARSER_ERRNO(parser) == HPE_PAUSED) { - SET_ERRNO((paused) ? HPE_PAUSED : HPE_OK); - } else { - assert(0 && "Attempting to pause parser in error state"); - } -} - -int -http_body_is_final(const struct http_parser *parser) { - return parser->state == s_message_done; -} - -unsigned long -http_parser_version(void) { - return HTTP_PARSER_VERSION_MAJOR * 0x10000 | - HTTP_PARSER_VERSION_MINOR * 0x00100 | - HTTP_PARSER_VERSION_PATCH * 0x00001; -} diff --git a/deps/http_parser/http_parser.gyp b/deps/http_parser/http_parser.gyp deleted file mode 100644 index ef34ecaeaeab45..00000000000000 --- a/deps/http_parser/http_parser.gyp +++ /dev/null @@ -1,111 +0,0 @@ -# This file is used with the GYP meta build system. -# http://code.google.com/p/gyp/ -# To build try this: -# svn co http://gyp.googlecode.com/svn/trunk gyp -# ./gyp/gyp -f make --depth=`pwd` http_parser.gyp -# ./out/Debug/test -{ - 'target_defaults': { - 'default_configuration': 'Debug', - 'configurations': { - # TODO: hoist these out and put them somewhere common, because - # RuntimeLibrary MUST MATCH across the entire project - 'Debug': { - 'defines': [ 'DEBUG', '_DEBUG' ], - 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 1, # static debug - }, - }, - }, - 'Release': { - 'defines': [ 'NDEBUG' ], - 'cflags': [ '-Wall', '-Wextra', '-O3' ], - 'msvs_settings': { - 'VCCLCompilerTool': { - 'RuntimeLibrary': 0, # static release - }, - }, - } - }, - 'msvs_settings': { - 'VCCLCompilerTool': { - }, - 'VCLibrarianTool': { - }, - 'VCLinkerTool': { - 'GenerateDebugInformation': 'true', - }, - }, - 'conditions': [ - ['OS == "win"', { - 'defines': [ - 'WIN32' - ], - }] - ], - }, - - 'targets': [ - { - 'target_name': 'http_parser', - 'type': 'static_library', - 'include_dirs': [ '.' ], - 'direct_dependent_settings': { - 'defines': [ 'HTTP_PARSER_STRICT=0' ], - 'include_dirs': [ '.' 
], - }, - 'defines': [ 'HTTP_PARSER_STRICT=0' ], - 'sources': [ './http_parser.c', ], - 'conditions': [ - ['OS=="win"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - # Compile as C++. http_parser.c is actually C99, but C++ is - # close enough in this case. - 'CompileAs': 2, - }, - }, - }] - ], - }, - - { - 'target_name': 'http_parser_strict', - 'type': 'static_library', - 'include_dirs': [ '.' ], - 'direct_dependent_settings': { - 'defines': [ 'HTTP_PARSER_STRICT=1' ], - 'include_dirs': [ '.' ], - }, - 'defines': [ 'HTTP_PARSER_STRICT=1' ], - 'sources': [ './http_parser.c', ], - 'conditions': [ - ['OS=="win"', { - 'msvs_settings': { - 'VCCLCompilerTool': { - # Compile as C++. http_parser.c is actually C99, but C++ is - # close enough in this case. - 'CompileAs': 2, - }, - }, - }] - ], - }, - - { - 'target_name': 'test-nonstrict', - 'type': 'executable', - 'dependencies': [ 'http_parser' ], - 'sources': [ 'test.c' ] - }, - - { - 'target_name': 'test-strict', - 'type': 'executable', - 'dependencies': [ 'http_parser_strict' ], - 'sources': [ 'test.c' ] - } - ] -} diff --git a/deps/http_parser/http_parser.h b/deps/http_parser/http_parser.h deleted file mode 100644 index eb71bf99219315..00000000000000 --- a/deps/http_parser/http_parser.h +++ /dev/null @@ -1,342 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#ifndef http_parser_h -#define http_parser_h -#ifdef __cplusplus -extern "C" { -#endif - -/* Also update SONAME in the Makefile whenever you change these. */ -#define HTTP_PARSER_VERSION_MAJOR 2 -#define HTTP_PARSER_VERSION_MINOR 5 -#define HTTP_PARSER_VERSION_PATCH 0 - -#include -#if defined(_WIN32) && !defined(__MINGW32__) && (!defined(_MSC_VER) || _MSC_VER<1600) -#include -#include -typedef __int8 int8_t; -typedef unsigned __int8 uint8_t; -typedef __int16 int16_t; -typedef unsigned __int16 uint16_t; -typedef __int32 int32_t; -typedef unsigned __int32 uint32_t; -typedef __int64 int64_t; -typedef unsigned __int64 uint64_t; -#else -#include -#endif - -/* Compile with -DHTTP_PARSER_STRICT=0 to make less checks, but run - * faster - */ -#ifndef HTTP_PARSER_STRICT -# define HTTP_PARSER_STRICT 1 -#endif - -/* Maximium header size allowed. If the macro is not defined - * before including this header then the default is used. To - * change the maximum header size, define the macro in the build - * environment (e.g. -DHTTP_MAX_HEADER_SIZE=). To remove - * the effective limit on the size of the header, define the macro - * to a very large number (e.g. -DHTTP_MAX_HEADER_SIZE=0x7fffffff) - */ -#ifndef HTTP_MAX_HEADER_SIZE -# define HTTP_MAX_HEADER_SIZE (80*1024) -#endif - -typedef struct http_parser http_parser; -typedef struct http_parser_settings http_parser_settings; - - -/* Callbacks should return non-zero to indicate an error. The parser will - * then halt execution. 
- * - * The one exception is on_headers_complete. In a HTTP_RESPONSE parser - * returning '1' from on_headers_complete will tell the parser that it - * should not expect a body. This is used when receiving a response to a - * HEAD request which may contain 'Content-Length' or 'Transfer-Encoding: - * chunked' headers that indicate the presence of a body. - * - * http_data_cb does not return data chunks. It will be called arbitrarily - * many times for each string. E.G. you might get 10 callbacks for "on_url" - * each providing just a few characters more data. - */ -typedef int (*http_data_cb) (http_parser*, const char *at, size_t length); -typedef int (*http_cb) (http_parser*); - - -/* Request Methods */ -#define HTTP_METHOD_MAP(XX) \ - XX(0, DELETE, DELETE) \ - XX(1, GET, GET) \ - XX(2, HEAD, HEAD) \ - XX(3, POST, POST) \ - XX(4, PUT, PUT) \ - /* pathological */ \ - XX(5, CONNECT, CONNECT) \ - XX(6, OPTIONS, OPTIONS) \ - XX(7, TRACE, TRACE) \ - /* webdav */ \ - XX(8, COPY, COPY) \ - XX(9, LOCK, LOCK) \ - XX(10, MKCOL, MKCOL) \ - XX(11, MOVE, MOVE) \ - XX(12, PROPFIND, PROPFIND) \ - XX(13, PROPPATCH, PROPPATCH) \ - XX(14, SEARCH, SEARCH) \ - XX(15, UNLOCK, UNLOCK) \ - /* subversion */ \ - XX(16, REPORT, REPORT) \ - XX(17, MKACTIVITY, MKACTIVITY) \ - XX(18, CHECKOUT, CHECKOUT) \ - XX(19, MERGE, MERGE) \ - /* upnp */ \ - XX(20, MSEARCH, M-SEARCH) \ - XX(21, NOTIFY, NOTIFY) \ - XX(22, SUBSCRIBE, SUBSCRIBE) \ - XX(23, UNSUBSCRIBE, UNSUBSCRIBE) \ - /* RFC-5789 */ \ - XX(24, PATCH, PATCH) \ - XX(25, PURGE, PURGE) \ - /* CalDAV */ \ - XX(26, MKCALENDAR, MKCALENDAR) \ - -enum http_method - { -#define XX(num, name, string) HTTP_##name = num, - HTTP_METHOD_MAP(XX) -#undef XX - }; - - -enum http_parser_type { HTTP_REQUEST, HTTP_RESPONSE, HTTP_BOTH }; - - -/* Flag values for http_parser.flags field */ -enum flags - { F_CHUNKED = 1 << 0 - , F_CONNECTION_KEEP_ALIVE = 1 << 1 - , F_CONNECTION_CLOSE = 1 << 2 - , F_CONNECTION_UPGRADE = 1 << 3 - , F_TRAILING = 1 << 4 - , F_UPGRADE = 1 
<< 5 - , F_SKIPBODY = 1 << 6 - }; - - -/* Map for errno-related constants - * - * The provided argument should be a macro that takes 2 arguments. - */ -#define HTTP_ERRNO_MAP(XX) \ - /* No error */ \ - XX(OK, "success") \ - \ - /* Callback-related errors */ \ - XX(CB_message_begin, "the on_message_begin callback failed") \ - XX(CB_url, "the on_url callback failed") \ - XX(CB_header_field, "the on_header_field callback failed") \ - XX(CB_header_value, "the on_header_value callback failed") \ - XX(CB_headers_complete, "the on_headers_complete callback failed") \ - XX(CB_body, "the on_body callback failed") \ - XX(CB_message_complete, "the on_message_complete callback failed") \ - XX(CB_status, "the on_status callback failed") \ - XX(CB_chunk_header, "the on_chunk_header callback failed") \ - XX(CB_chunk_complete, "the on_chunk_complete callback failed") \ - \ - /* Parsing-related errors */ \ - XX(INVALID_EOF_STATE, "stream ended at an unexpected time") \ - XX(HEADER_OVERFLOW, \ - "too many header bytes seen; overflow detected") \ - XX(CLOSED_CONNECTION, \ - "data received after completed connection: close message") \ - XX(INVALID_VERSION, "invalid HTTP version") \ - XX(INVALID_STATUS, "invalid HTTP status code") \ - XX(INVALID_METHOD, "invalid HTTP method") \ - XX(INVALID_URL, "invalid URL") \ - XX(INVALID_HOST, "invalid host") \ - XX(INVALID_PORT, "invalid port") \ - XX(INVALID_PATH, "invalid path") \ - XX(INVALID_QUERY_STRING, "invalid query string") \ - XX(INVALID_FRAGMENT, "invalid fragment") \ - XX(LF_EXPECTED, "LF character expected") \ - XX(INVALID_HEADER_TOKEN, "invalid character in header") \ - XX(INVALID_CONTENT_LENGTH, \ - "invalid character in content-length header") \ - XX(INVALID_CHUNK_SIZE, \ - "invalid character in chunk size header") \ - XX(INVALID_CONSTANT, "invalid constant string") \ - XX(INVALID_INTERNAL_STATE, "encountered unexpected internal state")\ - XX(STRICT, "strict mode assertion failed") \ - XX(PAUSED, "parser is paused") \ - XX(UNKNOWN, 
"an unknown error occurred") - - -/* Define HPE_* values for each errno value above */ -#define HTTP_ERRNO_GEN(n, s) HPE_##n, -enum http_errno { - HTTP_ERRNO_MAP(HTTP_ERRNO_GEN) -}; -#undef HTTP_ERRNO_GEN - - -/* Get an http_errno value from an http_parser */ -#define HTTP_PARSER_ERRNO(p) ((enum http_errno) (p)->http_errno) - - -struct http_parser { - /** PRIVATE **/ - unsigned int type : 2; /* enum http_parser_type */ - unsigned int flags : 7; /* F_* values from 'flags' enum; semi-public */ - unsigned int state : 7; /* enum state from http_parser.c */ - unsigned int header_state : 8; /* enum header_state from http_parser.c */ - unsigned int index : 8; /* index into current matcher */ - - uint32_t nread; /* # bytes read in various scenarios */ - uint64_t content_length; /* # bytes in body (0 if no Content-Length header) */ - - /** READ-ONLY **/ - unsigned short http_major; - unsigned short http_minor; - unsigned int status_code : 16; /* responses only */ - unsigned int method : 8; /* requests only */ - unsigned int http_errno : 7; - - /* 1 = Upgrade header was present and the parser has exited because of that. - * 0 = No upgrade header present. - * Should be checked when http_parser_execute() returns in addition to - * error checking. - */ - unsigned int upgrade : 1; - - /** PUBLIC **/ - void *data; /* A pointer to get hook to the "connection" or "socket" object */ -}; - - -struct http_parser_settings { - http_cb on_message_begin; - http_data_cb on_url; - http_data_cb on_status; - http_data_cb on_header_field; - http_data_cb on_header_value; - http_cb on_headers_complete; - http_data_cb on_body; - http_cb on_message_complete; - /* When on_chunk_header is called, the current chunk length is stored - * in parser->content_length. 
- */ - http_cb on_chunk_header; - http_cb on_chunk_complete; -}; - - -enum http_parser_url_fields - { UF_SCHEMA = 0 - , UF_HOST = 1 - , UF_PORT = 2 - , UF_PATH = 3 - , UF_QUERY = 4 - , UF_FRAGMENT = 5 - , UF_USERINFO = 6 - , UF_MAX = 7 - }; - - -/* Result structure for http_parser_parse_url(). - * - * Callers should index into field_data[] with UF_* values iff field_set - * has the relevant (1 << UF_*) bit set. As a courtesy to clients (and - * because we probably have padding left over), we convert any port to - * a uint16_t. - */ -struct http_parser_url { - uint16_t field_set; /* Bitmask of (1 << UF_*) values */ - uint16_t port; /* Converted UF_PORT string */ - - struct { - uint16_t off; /* Offset into buffer in which field starts */ - uint16_t len; /* Length of run in buffer */ - } field_data[UF_MAX]; -}; - - -/* Returns the library version. Bits 16-23 contain the major version number, - * bits 8-15 the minor version number and bits 0-7 the patch level. - * Usage example: - * - * unsigned long version = http_parser_version(); - * unsigned major = (version >> 16) & 255; - * unsigned minor = (version >> 8) & 255; - * unsigned patch = version & 255; - * printf("http_parser v%u.%u.%u\n", major, minor, patch); - */ -unsigned long http_parser_version(void); - -void http_parser_init(http_parser *parser, enum http_parser_type type); - - -/* Initialize http_parser_settings members to 0 - */ -void http_parser_settings_init(http_parser_settings *settings); - - -/* Executes the parser. Returns number of parsed bytes. Sets - * `parser->http_errno` on error. */ -size_t http_parser_execute(http_parser *parser, - const http_parser_settings *settings, - const char *data, - size_t len); - - -/* If http_should_keep_alive() in the on_headers_complete or - * on_message_complete callback returns 0, then this should be - * the last message on the connection. - * If you are the server, respond with the "Connection: close" header. - * If you are the client, close the connection. 
- */ -int http_should_keep_alive(const http_parser *parser); - -/* Returns a string version of the HTTP method. */ -const char *http_method_str(enum http_method m); - -/* Return a string name of the given error */ -const char *http_errno_name(enum http_errno err); - -/* Return a string description of the given error */ -const char *http_errno_description(enum http_errno err); - -/* Parse a URL; return nonzero on failure */ -int http_parser_parse_url(const char *buf, size_t buflen, - int is_connect, - struct http_parser_url *u); - -/* Pause or un-pause the parser; a nonzero value pauses */ -void http_parser_pause(http_parser *parser, int paused); - -/* Checks if this is the final chunk of the body. */ -int http_body_is_final(const http_parser *parser); - -#ifdef __cplusplus -} -#endif -#endif diff --git a/deps/http_parser/test.c b/deps/http_parser/test.c deleted file mode 100644 index 4c00571eba60bc..00000000000000 --- a/deps/http_parser/test.c +++ /dev/null @@ -1,3852 +0,0 @@ -/* Copyright Joyent, Inc. and other Node contributors. All rights reserved. - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - */ -#include "http_parser.h" -#include -#include -#include -#include /* rand */ -#include -#include - -#if defined(__APPLE__) -# undef strlcat -# undef strlncpy -# undef strlcpy -#endif /* defined(__APPLE__) */ - -#undef TRUE -#define TRUE 1 -#undef FALSE -#define FALSE 0 - -#define MAX_HEADERS 13 -#define MAX_ELEMENT_SIZE 2048 -#define MAX_CHUNKS 16 - -#define MIN(a,b) ((a) < (b) ? (a) : (b)) - -static http_parser *parser; - -struct message { - const char *name; // for debugging purposes - const char *raw; - enum http_parser_type type; - enum http_method method; - int status_code; - char response_status[MAX_ELEMENT_SIZE]; - char request_path[MAX_ELEMENT_SIZE]; - char request_url[MAX_ELEMENT_SIZE]; - char fragment[MAX_ELEMENT_SIZE]; - char query_string[MAX_ELEMENT_SIZE]; - char body[MAX_ELEMENT_SIZE]; - size_t body_size; - const char *host; - const char *userinfo; - uint16_t port; - int num_headers; - enum { NONE=0, FIELD, VALUE } last_header_element; - char headers [MAX_HEADERS][2][MAX_ELEMENT_SIZE]; - int should_keep_alive; - - int num_chunks; - int num_chunks_complete; - int chunk_lengths[MAX_CHUNKS]; - - const char *upgrade; // upgraded body - - unsigned short http_major; - unsigned short http_minor; - - int message_begin_cb_called; - int headers_complete_cb_called; - int message_complete_cb_called; - int message_complete_on_eof; - int body_is_final; -}; - -static int currently_parsing_eof; - -static struct message messages[5]; -static int num_messages; -static http_parser_settings *current_pause_parser; - -/* * R E Q U E S T S * */ -const struct message requests[] = -#define CURL_GET 0 -{ {.name= "curl get" - ,.type= HTTP_REQUEST - ,.raw= "GET /test HTTP/1.1\r\n" - "User-Agent: 
curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1\r\n" - "Host: 0.0.0.0=5000\r\n" - "Accept: */*\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 3 - ,.headers= - { { "User-Agent", "curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1" } - , { "Host", "0.0.0.0=5000" } - , { "Accept", "*/*" } - } - ,.body= "" - } - -#define FIREFOX_GET 1 -, {.name= "firefox get" - ,.type= HTTP_REQUEST - ,.raw= "GET /favicon.ico HTTP/1.1\r\n" - "Host: 0.0.0.0=5000\r\n" - "User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0\r\n" - "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n" - "Accept-Language: en-us,en;q=0.5\r\n" - "Accept-Encoding: gzip,deflate\r\n" - "Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n" - "Keep-Alive: 300\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/favicon.ico" - ,.request_url= "/favicon.ico" - ,.num_headers= 8 - ,.headers= - { { "Host", "0.0.0.0=5000" } - , { "User-Agent", "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0" } - , { "Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" } - , { "Accept-Language", "en-us,en;q=0.5" } - , { "Accept-Encoding", "gzip,deflate" } - , { "Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7" } - , { "Keep-Alive", "300" } - , { "Connection", "keep-alive" } - } - ,.body= "" - } - -#define DUMBFUCK 2 -, {.name= "dumbfuck" - ,.type= HTTP_REQUEST - ,.raw= "GET /dumbfuck HTTP/1.1\r\n" - "aaaaaaaaaaaaa:++++++++++\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE 
- ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/dumbfuck" - ,.request_url= "/dumbfuck" - ,.num_headers= 1 - ,.headers= - { { "aaaaaaaaaaaaa", "++++++++++" } - } - ,.body= "" - } - -#define FRAGMENT_IN_URI 3 -, {.name= "fragment in url" - ,.type= HTTP_REQUEST - ,.raw= "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "page=1" - ,.fragment= "posts-17408" - ,.request_path= "/forums/1/topics/2375" - /* XXX request url does include fragment? */ - ,.request_url= "/forums/1/topics/2375?page=1#posts-17408" - ,.num_headers= 0 - ,.body= "" - } - -#define GET_NO_HEADERS_NO_BODY 4 -, {.name= "get no headers no body" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_no_headers_no_body/world HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE /* would need Connection: close */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_no_headers_no_body/world" - ,.request_url= "/get_no_headers_no_body/world" - ,.num_headers= 0 - ,.body= "" - } - -#define GET_ONE_HEADER_NO_BODY 5 -, {.name= "get one header no body" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_one_header_no_body HTTP/1.1\r\n" - "Accept: */*\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE /* would need Connection: close */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_one_header_no_body" - ,.request_url= "/get_one_header_no_body" - ,.num_headers= 1 - ,.headers= - { { "Accept" , "*/*" } - } - ,.body= "" - } - -#define GET_FUNKY_CONTENT_LENGTH 6 -, {.name= "get funky content length body hello" - ,.type= HTTP_REQUEST - ,.raw= "GET /get_funky_content_length_body_hello HTTP/1.0\r\n" - "conTENT-Length: 5\r\n" - "\r\n" - 
"HELLO" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/get_funky_content_length_body_hello" - ,.request_url= "/get_funky_content_length_body_hello" - ,.num_headers= 1 - ,.headers= - { { "conTENT-Length" , "5" } - } - ,.body= "HELLO" - } - -#define POST_IDENTITY_BODY_WORLD 7 -, {.name= "post identity body world" - ,.type= HTTP_REQUEST - ,.raw= "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n" - "Accept: */*\r\n" - "Transfer-Encoding: identity\r\n" - "Content-Length: 5\r\n" - "\r\n" - "World" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "q=search" - ,.fragment= "hey" - ,.request_path= "/post_identity_body_world" - ,.request_url= "/post_identity_body_world?q=search#hey" - ,.num_headers= 3 - ,.headers= - { { "Accept", "*/*" } - , { "Transfer-Encoding", "identity" } - , { "Content-Length", "5" } - } - ,.body= "World" - } - -#define POST_CHUNKED_ALL_YOUR_BASE 8 -, {.name= "post - chunked body: all your base are belong to us" - ,.type= HTTP_REQUEST - ,.raw= "POST /post_chunked_all_your_base HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "1e\r\nall your base are belong to us\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/post_chunked_all_your_base" - ,.request_url= "/post_chunked_all_your_base" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding" , "chunked" } - } - ,.body= "all your base are belong to us" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 0x1e } - } - -#define TWO_CHUNKS_MULT_ZERO_END 9 -, {.name= "two chunks ; triple zero ending" - ,.type= HTTP_REQUEST - ,.raw= "POST /two_chunks_mult_zero_end HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - 
"5\r\nhello\r\n" - "6\r\n world\r\n" - "000\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/two_chunks_mult_zero_end" - ,.request_url= "/two_chunks_mult_zero_end" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 5, 6 } - } - -#define CHUNKED_W_TRAILING_HEADERS 10 -, {.name= "chunked with trailing headers. blech." - ,.type= HTTP_REQUEST - ,.raw= "POST /chunked_w_trailing_headers HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "5\r\nhello\r\n" - "6\r\n world\r\n" - "0\r\n" - "Vary: *\r\n" - "Content-Type: text/plain\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/chunked_w_trailing_headers" - ,.request_url= "/chunked_w_trailing_headers" - ,.num_headers= 3 - ,.headers= - { { "Transfer-Encoding", "chunked" } - , { "Vary", "*" } - , { "Content-Type", "text/plain" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 5, 6 } - } - -#define CHUNKED_W_BULLSHIT_AFTER_LENGTH 11 -, {.name= "with bullshit after the length" - ,.type= HTTP_REQUEST - ,.raw= "POST /chunked_w_bullshit_after_length HTTP/1.1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "5; ihatew3;whatthefuck=aretheseparametersfor\r\nhello\r\n" - "6; blahblah; blah\r\n world\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/chunked_w_bullshit_after_length" - ,.request_url= "/chunked_w_bullshit_after_length" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body= "hello world" - ,.num_chunks_complete= 3 - 
,.chunk_lengths= { 5, 6 } - } - -#define WITH_QUOTES 12 -, {.name= "with quotes" - ,.type= HTTP_REQUEST - ,.raw= "GET /with_\"stupid\"_quotes?foo=\"bar\" HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "foo=\"bar\"" - ,.fragment= "" - ,.request_path= "/with_\"stupid\"_quotes" - ,.request_url= "/with_\"stupid\"_quotes?foo=\"bar\"" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define APACHEBENCH_GET 13 -/* The server receiving this request SHOULD NOT wait for EOF - * to know that content-length == 0. - * How to represent this in a unit test? message_complete_on_eof - * Compare with NO_CONTENT_LENGTH_RESPONSE. - */ -, {.name = "apachebench get" - ,.type= HTTP_REQUEST - ,.raw= "GET /test HTTP/1.0\r\n" - "Host: 0.0.0.0:5000\r\n" - "User-Agent: ApacheBench/2.3\r\n" - "Accept: */*\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 3 - ,.headers= { { "Host", "0.0.0.0:5000" } - , { "User-Agent", "ApacheBench/2.3" } - , { "Accept", "*/*" } - } - ,.body= "" - } - -#define QUERY_URL_WITH_QUESTION_MARK_GET 14 -/* Some clients include '?' characters in query strings. 
- */ -, {.name = "query url with question mark" - ,.type= HTTP_REQUEST - ,.raw= "GET /test.cgi?foo=bar?baz HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "foo=bar?baz" - ,.fragment= "" - ,.request_path= "/test.cgi" - ,.request_url= "/test.cgi?foo=bar?baz" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define PREFIX_NEWLINE_GET 15 -/* Some clients, especially after a POST in a keep-alive connection, - * will send an extra CRLF before the next request - */ -, {.name = "newline prefix get" - ,.type= HTTP_REQUEST - ,.raw= "\r\nGET /test HTTP/1.1\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define UPGRADE_REQUEST 16 -, {.name = "upgrade request" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Upgrade\r\n" - "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" - "Sec-WebSocket-Protocol: sample\r\n" - "Upgrade: WebSocket\r\n" - "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" - "Origin: http://example.com\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 7 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Upgrade" } - , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } - , { "Sec-WebSocket-Protocol", "sample" } - , { "Upgrade", "WebSocket" } - , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } - , { "Origin", "http://example.com" } - } - ,.body= "" - } - -#define CONNECT_REQUEST 17 -, {.name = "connect request" - ,.type= HTTP_REQUEST - ,.raw= 
"CONNECT 0-home0.netscape.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - "some data\r\n" - "and yet even more data" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "0-home0.netscape.com:443" - ,.num_headers= 2 - ,.upgrade="some data\r\nand yet even more data" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } - -#define REPORT_REQ 18 -, {.name= "report request" - ,.type= HTTP_REQUEST - ,.raw= "REPORT /test HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_REPORT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/test" - ,.request_url= "/test" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define NO_HTTP_VERSION 19 -, {.name= "request with no http version" - ,.type= HTTP_REQUEST - ,.raw= "GET /\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 0 - ,.http_minor= 9 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define MSEARCH_REQ 20 -, {.name= "m-search request" - ,.type= HTTP_REQUEST - ,.raw= "M-SEARCH * HTTP/1.1\r\n" - "HOST: 239.255.255.250:1900\r\n" - "MAN: \"ssdp:discover\"\r\n" - "ST: \"ssdp:all\"\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_MSEARCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "*" - ,.request_url= "*" - ,.num_headers= 3 - ,.headers= { { "HOST", "239.255.255.250:1900" } - , { "MAN", "\"ssdp:discover\"" } - , { "ST", "\"ssdp:all\"" } - } - ,.body= "" - } - -#define LINE_FOLDING_IN_HEADER 21 -, {.name= 
"line folding in header value" - ,.type= HTTP_REQUEST - ,.raw= "GET / HTTP/1.1\r\n" - "Line1: abc\r\n" - "\tdef\r\n" - " ghi\r\n" - "\t\tjkl\r\n" - " mno \r\n" - "\t \tqrs\r\n" - "Line2: \t line2\t\r\n" - "Line3:\r\n" - " line3\r\n" - "Line4: \r\n" - " \r\n" - "Connection:\r\n" - " close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 5 - ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } - , { "Line2", "line2\t" } - , { "Line3", "line3" } - , { "Line4", "" } - , { "Connection", "close" }, - } - ,.body= "" - } - - -#define QUERY_TERMINATED_HOST 22 -, {.name= "host terminated by a query string" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org?hail=all HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "hail=all" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org?hail=all" - ,.host= "hypnotoad.org" - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define QUERY_TERMINATED_HOSTPORT 23 -, {.name= "host:port terminated by a query string" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org:1234?hail=all HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "hail=all" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org:1234?hail=all" - ,.host= "hypnotoad.org" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define SPACE_TERMINATED_HOSTPORT 24 -, {.name= "host:port terminated by a space" - ,.type= HTTP_REQUEST - ,.raw= "GET http://hypnotoad.org:1234 HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - 
,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "http://hypnotoad.org:1234" - ,.host= "hypnotoad.org" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define PATCH_REQ 25 -, {.name = "PATCH request" - ,.type= HTTP_REQUEST - ,.raw= "PATCH /file.txt HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/example\r\n" - "If-Match: \"e0023aa4e\"\r\n" - "Content-Length: 10\r\n" - "\r\n" - "cccccccccc" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_PATCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/file.txt" - ,.request_url= "/file.txt" - ,.num_headers= 4 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/example" } - , { "If-Match", "\"e0023aa4e\"" } - , { "Content-Length", "10" } - } - ,.body= "cccccccccc" - } - -#define CONNECT_CAPS_REQUEST 26 -, {.name = "connect caps request" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "HOME0.NETSCAPE.COM:443" - ,.num_headers= 2 - ,.upgrade="" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } - -#if !HTTP_PARSER_STRICT -#define UTF8_PATH_REQ 27 -, {.name= "utf-8 path request" - ,.type= HTTP_REQUEST - ,.raw= "GET /δ¶/δt/pope?q=1#narf HTTP/1.1\r\n" - "Host: github.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "q=1" - ,.fragment= "narf" - ,.request_path= "/δ¶/δt/pope" - ,.request_url= "/δ¶/δt/pope?q=1#narf" - ,.num_headers= 1 - 
,.headers= { {"Host", "github.com" } - } - ,.body= "" - } - -#define HOSTNAME_UNDERSCORE 28 -, {.name = "hostname underscore" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT home_0.netscape.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.query_string= "" - ,.fragment= "" - ,.request_path= "" - ,.request_url= "home_0.netscape.com:443" - ,.num_headers= 2 - ,.upgrade="" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - } - ,.body= "" - } -#endif /* !HTTP_PARSER_STRICT */ - -/* see https://github.com/ry/http-parser/issues/47 */ -#define EAT_TRAILING_CRLF_NO_CONNECTION_CLOSE 29 -, {.name = "eat CRLF between requests, no \"Connection: close\" header" - ,.raw= "POST / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/x-www-form-urlencoded\r\n" - "Content-Length: 4\r\n" - "\r\n" - "q=42\r\n" /* note the trailing CRLF */ - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 3 - ,.upgrade= 0 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/x-www-form-urlencoded" } - , { "Content-Length", "4" } - } - ,.body= "q=42" - } - -/* see https://github.com/ry/http-parser/issues/47 */ -#define EAT_TRAILING_CRLF_WITH_CONNECTION_CLOSE 30 -, {.name = "eat CRLF between requests even if \"Connection: close\" is set" - ,.raw= "POST / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Content-Type: application/x-www-form-urlencoded\r\n" - "Content-Length: 4\r\n" - "Connection: close\r\n" - "\r\n" - "q=42\r\n" /* note the trailing CRLF */ - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE /* input buffer isn't empty when 
on_message_complete is called */ - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 4 - ,.upgrade= 0 - ,.headers= { { "Host", "www.example.com" } - , { "Content-Type", "application/x-www-form-urlencoded" } - , { "Content-Length", "4" } - , { "Connection", "close" } - } - ,.body= "q=42" - } - -#define PURGE_REQ 31 -, {.name = "PURGE request" - ,.type= HTTP_REQUEST - ,.raw= "PURGE /file.txt HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_PURGE - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/file.txt" - ,.request_url= "/file.txt" - ,.num_headers= 1 - ,.headers= { { "Host", "www.example.com" } } - ,.body= "" - } - -#define SEARCH_REQ 32 -, {.name = "SEARCH request" - ,.type= HTTP_REQUEST - ,.raw= "SEARCH / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_SEARCH - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 1 - ,.headers= { { "Host", "www.example.com" } } - ,.body= "" - } - -#define PROXY_WITH_BASIC_AUTH 33 -, {.name= "host:port and basic_auth" - ,.type= HTTP_REQUEST - ,.raw= "GET http://a%12:b!&*$@hypnotoad.org:1234/toto HTTP/1.1\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.fragment= "" - ,.request_path= "/toto" - ,.request_url= "http://a%12:b!&*$@hypnotoad.org:1234/toto" - ,.host= "hypnotoad.org" - ,.userinfo= "a%12:b!&*$" - ,.port= 1234 - ,.num_headers= 0 - ,.headers= { } - ,.body= "" - } - -#define LINE_FOLDING_IN_HEADER_WITH_LF 34 -, {.name= "line folding in header value" - ,.type= HTTP_REQUEST - ,.raw= "GET / HTTP/1.1\n" - "Line1: abc\n" - "\tdef\n" - " ghi\n" - 
"\t\tjkl\n" - " mno \n" - "\t \tqrs\n" - "Line2: \t line2\t\n" - "Line3:\n" - " line3\n" - "Line4: \n" - " \n" - "Connection:\n" - " close\n" - "\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/" - ,.request_url= "/" - ,.num_headers= 5 - ,.headers= { { "Line1", "abc\tdef ghi\t\tjkl mno \t \tqrs" } - , { "Line2", "line2\t" } - , { "Line3", "line3" } - , { "Line4", "" } - , { "Connection", "close" }, - } - ,.body= "" - } - -#define CONNECTION_MULTI 35 -, {.name = "multiple connection header values with folding" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Something,\r\n" - " Upgrade, ,Keep-Alive\r\n" - "Sec-WebSocket-Key2: 12998 5 Y3 1 .P00\r\n" - "Sec-WebSocket-Protocol: sample\r\n" - "Upgrade: WebSocket\r\n" - "Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5\r\n" - "Origin: http://example.com\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 7 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Something, Upgrade, ,Keep-Alive" } - , { "Sec-WebSocket-Key2", "12998 5 Y3 1 .P00" } - , { "Sec-WebSocket-Protocol", "sample" } - , { "Upgrade", "WebSocket" } - , { "Sec-WebSocket-Key1", "4 @1 46546xW%0l 1 5" } - , { "Origin", "http://example.com" } - } - ,.body= "" - } - -#define CONNECTION_MULTI_LWS 36 -, {.name = "multiple connection header values with folding and lws" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Connection: keep-alive, upgrade\r\n" - "Upgrade: WebSocket\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - 
,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 2 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Connection", "keep-alive, upgrade" } - , { "Upgrade", "WebSocket" } - } - ,.body= "" - } - -#define CONNECTION_MULTI_LWS_CRLF 37 -, {.name = "multiple connection header values with folding and lws" - ,.type= HTTP_REQUEST - ,.raw= "GET /demo HTTP/1.1\r\n" - "Connection: keep-alive, \r\n upgrade\r\n" - "Upgrade: WebSocket\r\n" - "\r\n" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_GET - ,.query_string= "" - ,.fragment= "" - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 2 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Connection", "keep-alive, upgrade" } - , { "Upgrade", "WebSocket" } - } - ,.body= "" - } - -#define UPGRADE_POST_REQUEST 38 -, {.name = "upgrade post request" - ,.type= HTTP_REQUEST - ,.raw= "POST /demo HTTP/1.1\r\n" - "Host: example.com\r\n" - "Connection: Upgrade\r\n" - "Upgrade: HTTP/2.0\r\n" - "Content-Length: 15\r\n" - "\r\n" - "sweet post body" - "Hot diggity dogg" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.method= HTTP_POST - ,.request_path= "/demo" - ,.request_url= "/demo" - ,.num_headers= 4 - ,.upgrade="Hot diggity dogg" - ,.headers= { { "Host", "example.com" } - , { "Connection", "Upgrade" } - , { "Upgrade", "HTTP/2.0" } - , { "Content-Length", "15" } - } - ,.body= "sweet post body" - } - -#define CONNECT_WITH_BODY_REQUEST 39 -, {.name = "connect with body request" - ,.type= HTTP_REQUEST - ,.raw= "CONNECT foo.bar.com:443 HTTP/1.0\r\n" - "User-agent: Mozilla/1.1N\r\n" - "Proxy-authorization: basic aGVsbG86d29ybGQ=\r\n" - "Content-Length: 10\r\n" - "\r\n" - "blarfcicle" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.method= HTTP_CONNECT - ,.request_url= 
"foo.bar.com:443" - ,.num_headers= 3 - ,.upgrade="blarfcicle" - ,.headers= { { "User-agent", "Mozilla/1.1N" } - , { "Proxy-authorization", "basic aGVsbG86d29ybGQ=" } - , { "Content-Length", "10" } - } - ,.body= "" - } - -, {.name= NULL } /* sentinel */ -}; - -/* * R E S P O N S E S * */ -const struct message responses[] = -#define GOOGLE_301 0 -{ {.name= "google 301" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301 Moved Permanently\r\n" - "Location: http://www.google.com/\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Date: Sun, 26 Apr 2009 11:11:49 GMT\r\n" - "Expires: Tue, 26 May 2009 11:11:49 GMT\r\n" - "X-$PrototypeBI-Version: 1.6.0.3\r\n" /* $ char in header field */ - "Cache-Control: public, max-age=2592000\r\n" - "Server: gws\r\n" - "Content-Length: 219 \r\n" - "\r\n" - "\n" - "301 Moved\n" - "

301 Moved

\n" - "The document has moved\n" - "here.\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "Moved Permanently" - ,.num_headers= 8 - ,.headers= - { { "Location", "http://www.google.com/" } - , { "Content-Type", "text/html; charset=UTF-8" } - , { "Date", "Sun, 26 Apr 2009 11:11:49 GMT" } - , { "Expires", "Tue, 26 May 2009 11:11:49 GMT" } - , { "X-$PrototypeBI-Version", "1.6.0.3" } - , { "Cache-Control", "public, max-age=2592000" } - , { "Server", "gws" } - , { "Content-Length", "219 " } - } - ,.body= "\n" - "301 Moved\n" - "

301 Moved

\n" - "The document has moved\n" - "here.\r\n" - "\r\n" - } - -#define NO_CONTENT_LENGTH_RESPONSE 1 -/* The client should wait for the server's EOF. That is, when content-length - * is not specified, and "Connection: close", the end of body is specified - * by the EOF. - * Compare with APACHEBENCH_GET - */ -, {.name= "no content-length response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Date: Tue, 04 Aug 2009 07:59:32 GMT\r\n" - "Server: Apache\r\n" - "X-Powered-By: Servlet/2.5 JSP/2.1\r\n" - "Content-Type: text/xml; charset=utf-8\r\n" - "Connection: close\r\n" - "\r\n" - "\n" - "\n" - " \n" - " \n" - " SOAP-ENV:Client\n" - " Client Error\n" - " \n" - " \n" - "" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 5 - ,.headers= - { { "Date", "Tue, 04 Aug 2009 07:59:32 GMT" } - , { "Server", "Apache" } - , { "X-Powered-By", "Servlet/2.5 JSP/2.1" } - , { "Content-Type", "text/xml; charset=utf-8" } - , { "Connection", "close" } - } - ,.body= "\n" - "\n" - " \n" - " \n" - " SOAP-ENV:Client\n" - " Client Error\n" - " \n" - " \n" - "" - } - -#define NO_HEADERS_NO_BODY_404 2 -, {.name= "404 no headers no body" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 404 Not Found\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 404 - ,.response_status= "Not Found" - ,.num_headers= 0 - ,.headers= {} - ,.body_size= 0 - ,.body= "" - } - -#define NO_REASON_PHRASE 3 -, {.name= "301 no response phrase" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301\r\n\r\n" - ,.should_keep_alive = FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -#define TRAILING_SPACE_ON_CHUNKED_BODY 4 -, {.name="200 trailing space on chunked body" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 
OK\r\n" - "Content-Type: text/plain\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "25 \r\n" - "This is the data in the first chunk\r\n" - "\r\n" - "1C\r\n" - "and this is the second one\r\n" - "\r\n" - "0 \r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { {"Content-Type", "text/plain" } - , {"Transfer-Encoding", "chunked" } - } - ,.body_size = 37+28 - ,.body = - "This is the data in the first chunk\r\n" - "and this is the second one\r\n" - ,.num_chunks_complete= 3 - ,.chunk_lengths= { 0x25, 0x1c } - } - -#define NO_CARRIAGE_RET 5 -, {.name="no carriage ret" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\n" - "Content-Type: text/html; charset=utf-8\n" - "Connection: close\n" - "\n" - "these headers are from http://news.ycombinator.com/" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { {"Content-Type", "text/html; charset=utf-8" } - , {"Connection", "close" } - } - ,.body= "these headers are from http://news.ycombinator.com/" - } - -#define PROXY_CONNECTION 6 -, {.name="proxy connection" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Content-Length: 11\r\n" - "Proxy-Connection: close\r\n" - "Date: Thu, 31 Dec 2009 20:55:48 +0000\r\n" - "\r\n" - "hello world" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 4 - ,.headers= - { {"Content-Type", "text/html; charset=UTF-8" } - , {"Content-Length", "11" } - , {"Proxy-Connection", "close" } - , {"Date", "Thu, 31 Dec 2009 20:55:48 +0000"} - } - ,.body= "hello world" - } - -#define UNDERSTORE_HEADER_KEY 7 - // shown by - // curl -o /dev/null -v 
"http://ad.doubleclick.net/pfadx/DARTSHELLCONFIGXML;dcmt=text/xml;" -, {.name="underscore header key" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Server: DCLK-AdSvr\r\n" - "Content-Type: text/xml\r\n" - "Content-Length: 0\r\n" - "DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o\r\n\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 4 - ,.headers= - { {"Server", "DCLK-AdSvr" } - , {"Content-Type", "text/xml" } - , {"Content-Length", "0" } - , {"DCLK_imp", "v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o" } - } - ,.body= "" - } - -#define BONJOUR_MADAME_FR 8 -/* The client should not merge two headers fields when the first one doesn't - * have a value. - */ -, {.name= "bonjourmadame.fr" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 301 Moved Permanently\r\n" - "Date: Thu, 03 Jun 2010 09:56:32 GMT\r\n" - "Server: Apache/2.2.3 (Red Hat)\r\n" - "Cache-Control: public\r\n" - "Pragma: \r\n" - "Location: http://www.bonjourmadame.fr/\r\n" - "Vary: Accept-Encoding\r\n" - "Content-Length: 0\r\n" - "Content-Type: text/html; charset=UTF-8\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 301 - ,.response_status= "Moved Permanently" - ,.num_headers= 9 - ,.headers= - { { "Date", "Thu, 03 Jun 2010 09:56:32 GMT" } - , { "Server", "Apache/2.2.3 (Red Hat)" } - , { "Cache-Control", "public" } - , { "Pragma", "" } - , { "Location", "http://www.bonjourmadame.fr/" } - , { "Vary", "Accept-Encoding" } - , { "Content-Length", "0" } - , { "Content-Type", "text/html; charset=UTF-8" } - , { "Connection", "keep-alive" } - } - ,.body= "" - } - -#define RES_FIELD_UNDERSCORE 9 -/* Should handle spaces in header fields */ -, {.name= "field underscore" - ,.type= HTTP_RESPONSE - ,.raw= 
"HTTP/1.1 200 OK\r\n" - "Date: Tue, 28 Sep 2010 01:14:13 GMT\r\n" - "Server: Apache\r\n" - "Cache-Control: no-cache, must-revalidate\r\n" - "Expires: Mon, 26 Jul 1997 05:00:00 GMT\r\n" - ".et-Cookie: PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com\r\n" - "Vary: Accept-Encoding\r\n" - "_eep-Alive: timeout=45\r\n" /* semantic value ignored */ - "_onnection: Keep-Alive\r\n" /* semantic value ignored */ - "Transfer-Encoding: chunked\r\n" - "Content-Type: text/html\r\n" - "Connection: close\r\n" - "\r\n" - "0\r\n\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 11 - ,.headers= - { { "Date", "Tue, 28 Sep 2010 01:14:13 GMT" } - , { "Server", "Apache" } - , { "Cache-Control", "no-cache, must-revalidate" } - , { "Expires", "Mon, 26 Jul 1997 05:00:00 GMT" } - , { ".et-Cookie", "PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com" } - , { "Vary", "Accept-Encoding" } - , { "_eep-Alive", "timeout=45" } - , { "_onnection", "Keep-Alive" } - , { "Transfer-Encoding", "chunked" } - , { "Content-Type", "text/html" } - , { "Connection", "close" } - } - ,.body= "" - ,.num_chunks_complete= 1 - ,.chunk_lengths= {} - } - -#define NON_ASCII_IN_STATUS_LINE 10 -/* Should handle non-ASCII in status line */ -, {.name= "non-ASCII in status line" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 500 Oriëntatieprobleem\r\n" - "Date: Fri, 5 Nov 2010 23:07:12 GMT+2\r\n" - "Content-Length: 0\r\n" - "Connection: close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 500 - ,.response_status= "Oriëntatieprobleem" - ,.num_headers= 3 - ,.headers= - { { "Date", "Fri, 5 Nov 2010 23:07:12 GMT+2" } - , { "Content-Length", "0" } - , { "Connection", "close" } - } - ,.body= "" - } - -#define HTTP_VERSION_0_9 11 -/* Should handle HTTP/0.9 */ -, {.name= "http version 0.9" - ,.type= HTTP_RESPONSE - ,.raw= 
"HTTP/0.9 200 OK\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 0 - ,.http_minor= 9 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 0 - ,.headers= - {} - ,.body= "" - } - -#define NO_CONTENT_LENGTH_NO_TRANSFER_ENCODING_RESPONSE 12 -/* The client should wait for the server's EOF. That is, when neither - * content-length nor transfer-encoding is specified, the end of body - * is specified by the EOF. - */ -, {.name= "neither content-length nor transfer-encoding response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Content-Type: text/plain\r\n" - "\r\n" - "hello world" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Content-Type", "text/plain" } - } - ,.body= "hello world" - } - -#define NO_BODY_HTTP10_KA_200 13 -, {.name= "HTTP/1.0 with keep-alive and EOF-terminated 200 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 200 OK\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Connection", "keep-alive" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP10_KA_204 14 -, {.name= "HTTP/1.0 with keep-alive and a 204 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.0 204 No content\r\n" - "Connection: keep-alive\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 1 - ,.headers= - { { "Connection", "keep-alive" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_200 15 -, {.name= "HTTP/1.1 with an EOF-terminated 200 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "\r\n" - ,.should_keep_alive= FALSE - 
,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 0 - ,.headers={} - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_204 16 -, {.name= "HTTP/1.1 with a 204 status" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 204 No content\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 0 - ,.headers={} - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_NOKA_204 17 -, {.name= "HTTP/1.1 with a 204 status and keep-alive disabled" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 204 No content\r\n" - "Connection: close\r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 204 - ,.response_status= "No content" - ,.num_headers= 1 - ,.headers= - { { "Connection", "close" } - } - ,.body_size= 0 - ,.body= "" - } - -#define NO_BODY_HTTP11_KA_CHUNKED_200 18 -, {.name= "HTTP/1.1 with chunked endocing and a 200 response" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 1 - ,.headers= - { { "Transfer-Encoding", "chunked" } - } - ,.body_size= 0 - ,.body= "" - ,.num_chunks_complete= 1 - } - -#if !HTTP_PARSER_STRICT -#define SPACE_IN_FIELD_RES 19 -/* Should handle spaces in header fields */ -, {.name= "field space" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 OK\r\n" - "Server: Microsoft-IIS/6.0\r\n" - "X-Powered-By: ASP.NET\r\n" - "en-US Content-Type: text/xml\r\n" /* this is the problem */ - "Content-Type: text/xml\r\n" - "Content-Length: 16\r\n" - "Date: Fri, 23 Jul 2010 18:45:38 GMT\r\n" - "Connection: keep-alive\r\n" - "\r\n" - "hello" /* fake body */ - 
,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 7 - ,.headers= - { { "Server", "Microsoft-IIS/6.0" } - , { "X-Powered-By", "ASP.NET" } - , { "en-US Content-Type", "text/xml" } - , { "Content-Type", "text/xml" } - , { "Content-Length", "16" } - , { "Date", "Fri, 23 Jul 2010 18:45:38 GMT" } - , { "Connection", "keep-alive" } - } - ,.body= "hello" - } -#endif /* !HTTP_PARSER_STRICT */ - -#define AMAZON_COM 20 -, {.name= "amazon.com" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 301 MovedPermanently\r\n" - "Date: Wed, 15 May 2013 17:06:33 GMT\r\n" - "Server: Server\r\n" - "x-amz-id-1: 0GPHKXSJQ826RK7GZEB2\r\n" - "p3p: policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"\r\n" - "x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD\r\n" - "Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846\r\n" - "Vary: Accept-Encoding,User-Agent\r\n" - "Content-Type: text/html; charset=ISO-8859-1\r\n" - "Transfer-Encoding: chunked\r\n" - "\r\n" - "1\r\n" - "\n\r\n" - "0\r\n" - "\r\n" - ,.should_keep_alive= TRUE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 301 - ,.response_status= "MovedPermanently" - ,.num_headers= 9 - ,.headers= { { "Date", "Wed, 15 May 2013 17:06:33 GMT" } - , { "Server", "Server" } - , { "x-amz-id-1", "0GPHKXSJQ826RK7GZEB2" } - , { "p3p", "policyref=\"http://www.amazon.com/w3c/p3p.xml\",CP=\"CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC \"" } - , { "x-amz-id-2", 
"STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD" } - , { "Location", "http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846" } - , { "Vary", "Accept-Encoding,User-Agent" } - , { "Content-Type", "text/html; charset=ISO-8859-1" } - , { "Transfer-Encoding", "chunked" } - } - ,.body= "\n" - ,.num_chunks_complete= 2 - ,.chunk_lengths= { 1 } - } - -#define EMPTY_REASON_PHRASE_AFTER_SPACE 20 -, {.name= "empty reason phrase after space" - ,.type= HTTP_RESPONSE - ,.raw= "HTTP/1.1 200 \r\n" - "\r\n" - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= TRUE - ,.http_major= 1 - ,.http_minor= 1 - ,.status_code= 200 - ,.response_status= "" - ,.num_headers= 0 - ,.headers= {} - ,.body= "" - } - -, {.name= NULL } /* sentinel */ -}; - -/* strnlen() is a POSIX.2008 addition. Can't rely on it being available so - * define it ourselves. - */ -size_t -strnlen(const char *s, size_t maxlen) -{ - const char *p; - - p = memchr(s, '\0', maxlen); - if (p == NULL) - return maxlen; - - return p - s; -} - -size_t -strlncat(char *dst, size_t len, const char *src, size_t n) -{ - size_t slen; - size_t dlen; - size_t rlen; - size_t ncpy; - - slen = strnlen(src, n); - dlen = strnlen(dst, len); - - if (dlen < len) { - rlen = len - dlen; - ncpy = slen < rlen ? slen : (rlen - 1); - memcpy(dst + dlen, src, ncpy); - dst[dlen + ncpy] = '\0'; - } - - assert(len > slen + dlen); - return slen + dlen; -} - -size_t -strlcat(char *dst, const char *src, size_t len) -{ - return strlncat(dst, len, src, (size_t) -1); -} - -size_t -strlncpy(char *dst, size_t len, const char *src, size_t n) -{ - size_t slen; - size_t ncpy; - - slen = strnlen(src, n); - - if (len > 0) { - ncpy = slen < len ? 
slen : (len - 1); - memcpy(dst, src, ncpy); - dst[ncpy] = '\0'; - } - - assert(len > slen); - return slen; -} - -size_t -strlcpy(char *dst, const char *src, size_t len) -{ - return strlncpy(dst, len, src, (size_t) -1); -} - -int -request_url_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - strlncat(messages[num_messages].request_url, - sizeof(messages[num_messages].request_url), - buf, - len); - return 0; -} - -int -header_field_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - struct message *m = &messages[num_messages]; - - if (m->last_header_element != FIELD) - m->num_headers++; - - strlncat(m->headers[m->num_headers-1][0], - sizeof(m->headers[m->num_headers-1][0]), - buf, - len); - - m->last_header_element = FIELD; - - return 0; -} - -int -header_value_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - struct message *m = &messages[num_messages]; - - strlncat(m->headers[m->num_headers-1][1], - sizeof(m->headers[m->num_headers-1][1]), - buf, - len); - - m->last_header_element = VALUE; - - return 0; -} - -void -check_body_is_final (const http_parser *p) -{ - if (messages[num_messages].body_is_final) { - fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " - "on last on_body callback call " - "but it doesn't! 
***\n\n"); - assert(0); - abort(); - } - messages[num_messages].body_is_final = http_body_is_final(p); -} - -int -body_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - strlncat(messages[num_messages].body, - sizeof(messages[num_messages].body), - buf, - len); - messages[num_messages].body_size += len; - check_body_is_final(p); - // printf("body_cb: '%s'\n", requests[num_messages].body); - return 0; -} - -int -count_body_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - assert(buf); - messages[num_messages].body_size += len; - check_body_is_final(p); - return 0; -} - -int -message_begin_cb (http_parser *p) -{ - assert(p == parser); - messages[num_messages].message_begin_cb_called = TRUE; - return 0; -} - -int -headers_complete_cb (http_parser *p) -{ - assert(p == parser); - messages[num_messages].method = parser->method; - messages[num_messages].status_code = parser->status_code; - messages[num_messages].http_major = parser->http_major; - messages[num_messages].http_minor = parser->http_minor; - messages[num_messages].headers_complete_cb_called = TRUE; - messages[num_messages].should_keep_alive = http_should_keep_alive(parser); - return 0; -} - -int -message_complete_cb (http_parser *p) -{ - assert(p == parser); - if (messages[num_messages].should_keep_alive != http_should_keep_alive(parser)) - { - fprintf(stderr, "\n\n *** Error http_should_keep_alive() should have same " - "value in both on_message_complete and on_headers_complete " - "but it doesn't! ***\n\n"); - assert(0); - abort(); - } - - if (messages[num_messages].body_size && - http_body_is_final(p) && - !messages[num_messages].body_is_final) - { - fprintf(stderr, "\n\n *** Error http_body_is_final() should return 1 " - "on last on_body callback call " - "but it doesn't! 
***\n\n"); - assert(0); - abort(); - } - - messages[num_messages].message_complete_cb_called = TRUE; - - messages[num_messages].message_complete_on_eof = currently_parsing_eof; - - num_messages++; - return 0; -} - -int -response_status_cb (http_parser *p, const char *buf, size_t len) -{ - assert(p == parser); - strlncat(messages[num_messages].response_status, - sizeof(messages[num_messages].response_status), - buf, - len); - return 0; -} - -int -chunk_header_cb (http_parser *p) -{ - assert(p == parser); - int chunk_idx = messages[num_messages].num_chunks; - messages[num_messages].num_chunks++; - if (chunk_idx < MAX_CHUNKS) { - messages[num_messages].chunk_lengths[chunk_idx] = p->content_length; - } - - return 0; -} - -int -chunk_complete_cb (http_parser *p) -{ - assert(p == parser); - - /* Here we want to verify that each chunk_header_cb is matched by a - * chunk_complete_cb, so not only should the total number of calls to - * both callbacks be the same, but they also should be interleaved - * properly */ - assert(messages[num_messages].num_chunks == - messages[num_messages].num_chunks_complete + 1); - - messages[num_messages].num_chunks_complete++; - return 0; -} - -/* These dontcall_* callbacks exist so that we can verify that when we're - * paused, no additional callbacks are invoked */ -int -dontcall_message_begin_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_message_begin() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_header_field_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_header_field() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_header_value_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_header_value() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_request_url_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || 
len) { } // gcc - fprintf(stderr, "\n\n*** on_request_url() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_body_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_body_cb() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_headers_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_headers_complete() called on paused " - "parser ***\n\n"); - abort(); -} - -int -dontcall_message_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_message_complete() called on paused " - "parser ***\n\n"); - abort(); -} - -int -dontcall_response_status_cb (http_parser *p, const char *buf, size_t len) -{ - if (p || buf || len) { } // gcc - fprintf(stderr, "\n\n*** on_status() called on paused parser ***\n\n"); - abort(); -} - -int -dontcall_chunk_header_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_chunk_header() called on paused parser ***\n\n"); - exit(1); -} - -int -dontcall_chunk_complete_cb (http_parser *p) -{ - if (p) { } // gcc - fprintf(stderr, "\n\n*** on_chunk_complete() " - "called on paused parser ***\n\n"); - exit(1); -} - -static http_parser_settings settings_dontcall = - {.on_message_begin = dontcall_message_begin_cb - ,.on_header_field = dontcall_header_field_cb - ,.on_header_value = dontcall_header_value_cb - ,.on_url = dontcall_request_url_cb - ,.on_status = dontcall_response_status_cb - ,.on_body = dontcall_body_cb - ,.on_headers_complete = dontcall_headers_complete_cb - ,.on_message_complete = dontcall_message_complete_cb - ,.on_chunk_header = dontcall_chunk_header_cb - ,.on_chunk_complete = dontcall_chunk_complete_cb - }; - -/* These pause_* callbacks always pause the parser and just invoke the regular - * callback that tracks content. 
Before returning, we overwrite the parser - * settings to point to the _dontcall variety so that we can verify that - * the pause actually did, you know, pause. */ -int -pause_message_begin_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return message_begin_cb(p); -} - -int -pause_header_field_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return header_field_cb(p, buf, len); -} - -int -pause_header_value_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return header_value_cb(p, buf, len); -} - -int -pause_request_url_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return request_url_cb(p, buf, len); -} - -int -pause_body_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return body_cb(p, buf, len); -} - -int -pause_headers_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return headers_complete_cb(p); -} - -int -pause_message_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return message_complete_cb(p); -} - -int -pause_response_status_cb (http_parser *p, const char *buf, size_t len) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return response_status_cb(p, buf, len); -} - -int -pause_chunk_header_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return chunk_header_cb(p); -} - -int -pause_chunk_complete_cb (http_parser *p) -{ - http_parser_pause(p, 1); - *current_pause_parser = settings_dontcall; - return chunk_complete_cb(p); -} - -static http_parser_settings settings_pause = - {.on_message_begin = 
pause_message_begin_cb - ,.on_header_field = pause_header_field_cb - ,.on_header_value = pause_header_value_cb - ,.on_url = pause_request_url_cb - ,.on_status = pause_response_status_cb - ,.on_body = pause_body_cb - ,.on_headers_complete = pause_headers_complete_cb - ,.on_message_complete = pause_message_complete_cb - ,.on_chunk_header = pause_chunk_header_cb - ,.on_chunk_complete = pause_chunk_complete_cb - }; - -static http_parser_settings settings = - {.on_message_begin = message_begin_cb - ,.on_header_field = header_field_cb - ,.on_header_value = header_value_cb - ,.on_url = request_url_cb - ,.on_status = response_status_cb - ,.on_body = body_cb - ,.on_headers_complete = headers_complete_cb - ,.on_message_complete = message_complete_cb - ,.on_chunk_header = chunk_header_cb - ,.on_chunk_complete = chunk_complete_cb - }; - -static http_parser_settings settings_count_body = - {.on_message_begin = message_begin_cb - ,.on_header_field = header_field_cb - ,.on_header_value = header_value_cb - ,.on_url = request_url_cb - ,.on_status = response_status_cb - ,.on_body = count_body_cb - ,.on_headers_complete = headers_complete_cb - ,.on_message_complete = message_complete_cb - ,.on_chunk_header = chunk_header_cb - ,.on_chunk_complete = chunk_complete_cb - }; - -static http_parser_settings settings_null = - {.on_message_begin = 0 - ,.on_header_field = 0 - ,.on_header_value = 0 - ,.on_url = 0 - ,.on_status = 0 - ,.on_body = 0 - ,.on_headers_complete = 0 - ,.on_message_complete = 0 - ,.on_chunk_header = 0 - ,.on_chunk_complete = 0 - }; - -void -parser_init (enum http_parser_type type) -{ - num_messages = 0; - - assert(parser == NULL); - - parser = malloc(sizeof(http_parser)); - - http_parser_init(parser, type); - - memset(&messages, 0, sizeof messages); - -} - -void -parser_free () -{ - assert(parser); - free(parser); - parser = NULL; -} - -size_t parse (const char *buf, size_t len) -{ - size_t nparsed; - currently_parsing_eof = (len == 0); - nparsed = 
http_parser_execute(parser, &settings, buf, len); - return nparsed; -} - -size_t parse_count_body (const char *buf, size_t len) -{ - size_t nparsed; - currently_parsing_eof = (len == 0); - nparsed = http_parser_execute(parser, &settings_count_body, buf, len); - return nparsed; -} - -size_t parse_pause (const char *buf, size_t len) -{ - size_t nparsed; - http_parser_settings s = settings_pause; - - currently_parsing_eof = (len == 0); - current_pause_parser = &s; - nparsed = http_parser_execute(parser, current_pause_parser, buf, len); - return nparsed; -} - -static inline int -check_str_eq (const struct message *m, - const char *prop, - const char *expected, - const char *found) { - if ((expected == NULL) != (found == NULL)) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected %s\n", (expected == NULL) ? "NULL" : expected); - printf(" found %s\n", (found == NULL) ? "NULL" : found); - return 0; - } - if (expected != NULL && 0 != strcmp(expected, found)) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected '%s'\n", expected); - printf(" found '%s'\n", found); - return 0; - } - return 1; -} - -static inline int -check_num_eq (const struct message *m, - const char *prop, - int expected, - int found) { - if (expected != found) { - printf("\n*** Error: %s in '%s' ***\n\n", prop, m->name); - printf("expected %d\n", expected); - printf(" found %d\n", found); - return 0; - } - return 1; -} - -#define MESSAGE_CHECK_STR_EQ(expected, found, prop) \ - if (!check_str_eq(expected, #prop, expected->prop, found->prop)) return 0 - -#define MESSAGE_CHECK_NUM_EQ(expected, found, prop) \ - if (!check_num_eq(expected, #prop, expected->prop, found->prop)) return 0 - -#define MESSAGE_CHECK_URL_EQ(u, expected, found, prop, fn) \ -do { \ - char ubuf[256]; \ - \ - if ((u)->field_set & (1 << (fn))) { \ - memcpy(ubuf, (found)->request_url + (u)->field_data[(fn)].off, \ - (u)->field_data[(fn)].len); \ - ubuf[(u)->field_data[(fn)].len] = 
'\0'; \ - } else { \ - ubuf[0] = '\0'; \ - } \ - \ - check_str_eq(expected, #prop, expected->prop, ubuf); \ -} while(0) - -int -message_eq (int index, const struct message *expected) -{ - int i; - struct message *m = &messages[index]; - - MESSAGE_CHECK_NUM_EQ(expected, m, http_major); - MESSAGE_CHECK_NUM_EQ(expected, m, http_minor); - - if (expected->type == HTTP_REQUEST) { - MESSAGE_CHECK_NUM_EQ(expected, m, method); - } else { - MESSAGE_CHECK_NUM_EQ(expected, m, status_code); - MESSAGE_CHECK_STR_EQ(expected, m, response_status); - } - - MESSAGE_CHECK_NUM_EQ(expected, m, should_keep_alive); - MESSAGE_CHECK_NUM_EQ(expected, m, message_complete_on_eof); - - assert(m->message_begin_cb_called); - assert(m->headers_complete_cb_called); - assert(m->message_complete_cb_called); - - - MESSAGE_CHECK_STR_EQ(expected, m, request_url); - - /* Check URL components; we can't do this w/ CONNECT since it doesn't - * send us a well-formed URL. - */ - if (*m->request_url && m->method != HTTP_CONNECT) { - struct http_parser_url u; - - if (http_parser_parse_url(m->request_url, strlen(m->request_url), 0, &u)) { - fprintf(stderr, "\n\n*** failed to parse URL %s ***\n\n", - m->request_url); - abort(); - } - - if (expected->host) { - MESSAGE_CHECK_URL_EQ(&u, expected, m, host, UF_HOST); - } - - if (expected->userinfo) { - MESSAGE_CHECK_URL_EQ(&u, expected, m, userinfo, UF_USERINFO); - } - - m->port = (u.field_set & (1 << UF_PORT)) ? 
- u.port : 0; - - MESSAGE_CHECK_URL_EQ(&u, expected, m, query_string, UF_QUERY); - MESSAGE_CHECK_URL_EQ(&u, expected, m, fragment, UF_FRAGMENT); - MESSAGE_CHECK_URL_EQ(&u, expected, m, request_path, UF_PATH); - MESSAGE_CHECK_NUM_EQ(expected, m, port); - } - - if (expected->body_size) { - MESSAGE_CHECK_NUM_EQ(expected, m, body_size); - } else { - MESSAGE_CHECK_STR_EQ(expected, m, body); - } - - assert(m->num_chunks == m->num_chunks_complete); - MESSAGE_CHECK_NUM_EQ(expected, m, num_chunks_complete); - for (i = 0; i < m->num_chunks && i < MAX_CHUNKS; i++) { - MESSAGE_CHECK_NUM_EQ(expected, m, chunk_lengths[i]); - } - - MESSAGE_CHECK_NUM_EQ(expected, m, num_headers); - - int r; - for (i = 0; i < m->num_headers; i++) { - r = check_str_eq(expected, "header field", expected->headers[i][0], m->headers[i][0]); - if (!r) return 0; - r = check_str_eq(expected, "header value", expected->headers[i][1], m->headers[i][1]); - if (!r) return 0; - } - - MESSAGE_CHECK_STR_EQ(expected, m, upgrade); - - return 1; -} - -/* Given a sequence of varargs messages, return the number of them that the - * parser should successfully parse, taking into account that upgraded - * messages prevent all subsequent messages from being parsed. - */ -size_t -count_parsed_messages(const size_t nmsgs, ...) { - size_t i; - va_list ap; - - va_start(ap, nmsgs); - - for (i = 0; i < nmsgs; i++) { - struct message *m = va_arg(ap, struct message *); - - if (m->upgrade) { - va_end(ap); - return i + 1; - } - } - - va_end(ap); - return nmsgs; -} - -/* Given a sequence of bytes and the number of these that we were able to - * parse, verify that upgrade bodies are correct. - */ -void -upgrade_message_fix(char *body, const size_t nread, const size_t nmsgs, ...) 
{ - va_list ap; - size_t i; - size_t off = 0; - - va_start(ap, nmsgs); - - for (i = 0; i < nmsgs; i++) { - struct message *m = va_arg(ap, struct message *); - - off += strlen(m->raw); - - if (m->upgrade) { - off -= strlen(m->upgrade); - - /* Check the portion of the response after its specified upgrade */ - if (!check_str_eq(m, "upgrade", body + off, body + nread)) { - abort(); - } - - /* Fix up the response so that message_eq() will verify the beginning - * of the upgrade */ - *(body + nread + strlen(m->upgrade)) = '\0'; - messages[num_messages -1 ].upgrade = body + nread; - - va_end(ap); - return; - } - } - - va_end(ap); - printf("\n\n*** Error: expected a message with upgrade ***\n"); - - abort(); -} - -static void -print_error (const char *raw, size_t error_location) -{ - fprintf(stderr, "\n*** %s ***\n\n", - http_errno_description(HTTP_PARSER_ERRNO(parser))); - - int this_line = 0, char_len = 0; - size_t i, j, len = strlen(raw), error_location_line = 0; - for (i = 0; i < len; i++) { - if (i == error_location) this_line = 1; - switch (raw[i]) { - case '\r': - char_len = 2; - fprintf(stderr, "\\r"); - break; - - case '\n': - fprintf(stderr, "\\n\n"); - - if (this_line) goto print; - - error_location_line = 0; - continue; - - default: - char_len = 1; - fputc(raw[i], stderr); - break; - } - if (!this_line) error_location_line += char_len; - } - - fprintf(stderr, "[eof]\n"); - - print: - for (j = 0; j < error_location_line; j++) { - fputc(' ', stderr); - } - fprintf(stderr, "^\n\nerror location: %u\n", (unsigned int)error_location); -} - -void -test_preserve_data (void) -{ - char my_data[] = "application-specific data"; - http_parser parser; - parser.data = my_data; - http_parser_init(&parser, HTTP_REQUEST); - if (parser.data != my_data) { - printf("\n*** parser.data not preserved accross http_parser_init ***\n\n"); - abort(); - } -} - -struct url_test { - const char *name; - const char *url; - int is_connect; - struct http_parser_url u; - int rv; -}; - -const 
struct url_test url_tests[] = -{ {.name="proxy request" - ,.url="http://hostname/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 7, 8 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - ,{ 15, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="proxy request with port" - ,.url="http://hostname:444/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) - ,.port=444 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 7, 8 } /* UF_HOST */ - ,{ 16, 3 } /* UF_PORT */ - ,{ 19, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT request" - ,.url="hostname:443" - ,.is_connect=1 - ,.u= - {.field_set=(1 << UF_HOST) | (1 << UF_PORT) - ,.port=443 - ,.field_data= - {{ 0, 0 } /* UF_SCHEMA */ - ,{ 0, 8 } /* UF_HOST */ - ,{ 9, 3 } /* UF_PORT */ - ,{ 0, 0 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT request but not connect" - ,.url="hostname:443" - ,.is_connect=0 - ,.rv=1 - } - -, {.name="proxy ipv6 request" - ,.url="http://[1:2::3:4]/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 8 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - ,{ 17, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="proxy ipv6 request with port" - ,.url="http://[1:2::3:4]:67/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PORT) | (1 << UF_PATH) - ,.port=67 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 8 } /* UF_HOST */ - ,{ 18, 2 } /* UF_PORT */ - ,{ 20, 1 } /* 
UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="CONNECT ipv6 address" - ,.url="[1:2::3:4]:443" - ,.is_connect=1 - ,.u= - {.field_set=(1 << UF_HOST) | (1 << UF_PORT) - ,.port=443 - ,.field_data= - {{ 0, 0 } /* UF_SCHEMA */ - ,{ 1, 8 } /* UF_HOST */ - ,{ 11, 3 } /* UF_PORT */ - ,{ 0, 0 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="ipv4 in ipv6 address" - ,.url="http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/" - ,.is_connect=0 - ,.u= - {.field_set=(1 << UF_SCHEMA) | (1 << UF_HOST) | (1 << UF_PATH) - ,.port=0 - ,.field_data= - {{ 0, 4 } /* UF_SCHEMA */ - ,{ 8, 37 } /* UF_HOST */ - ,{ 0, 0 } /* UF_PORT */ - ,{ 46, 1 } /* UF_PATH */ - ,{ 0, 0 } /* UF_QUERY */ - ,{ 0, 0 } /* UF_FRAGMENT */ - ,{ 0, 0 } /* UF_USERINFO */ - } - } - ,.rv=0 - } - -, {.name="extra ? in query string" - ,.url="http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css," - "fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css," - "fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css" - ,.is_connect=0 - ,.u= - {.field_set=(1<field_set, u->port); - for (i = 0; i < UF_MAX; i++) { - if ((u->field_set & (1 << i)) == 0) { - printf("\tfield_data[%u]: unset\n", i); - continue; - } - - printf("\tfield_data[%u]: off: %u len: %u part: \"%.*s\n\"", - i, - u->field_data[i].off, - u->field_data[i].len, - u->field_data[i].len, - url + u->field_data[i].off); - } -} - -void -test_parse_url (void) -{ - struct http_parser_url u; - const struct url_test *test; - unsigned int i; - int rv; - - for (i = 0; i < (sizeof(url_tests) / sizeof(url_tests[0])); i++) { - test = &url_tests[i]; - memset(&u, 0, sizeof(u)); - - rv = http_parser_parse_url(test->url, - strlen(test->url), - test->is_connect, - &u); - - if (test->rv == 0) { - if (rv != 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" 
test failed, " - "unexpected rv %d ***\n\n", test->url, test->name, rv); - abort(); - } - - if (memcmp(&u, &test->u, sizeof(u)) != 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" failed ***\n", - test->url, test->name); - - printf("target http_parser_url:\n"); - dump_url(test->url, &test->u); - printf("result http_parser_url:\n"); - dump_url(test->url, &u); - - abort(); - } - } else { - /* test->rv != 0 */ - if (rv == 0) { - printf("\n*** http_parser_parse_url(\"%s\") \"%s\" test failed, " - "unexpected rv %d ***\n\n", test->url, test->name, rv); - abort(); - } - } - } -} - -void -test_method_str (void) -{ - assert(0 == strcmp("GET", http_method_str(HTTP_GET))); - assert(0 == strcmp("", http_method_str(1337))); -} - -void -test_message (const struct message *message) -{ - size_t raw_len = strlen(message->raw); - size_t msg1len; - for (msg1len = 0; msg1len < raw_len; msg1len++) { - parser_init(message->type); - - size_t read; - const char *msg1 = message->raw; - const char *msg2 = msg1 + msg1len; - size_t msg2len = raw_len - msg1len; - - if (msg1len) { - read = parse(msg1, msg1len); - - if (message->upgrade && parser->upgrade && num_messages > 0) { - messages[num_messages - 1].upgrade = msg1 + read; - goto test; - } - - if (read != msg1len) { - print_error(msg1, read); - abort(); - } - } - - - read = parse(msg2, msg2len); - - if (message->upgrade && parser->upgrade) { - messages[num_messages - 1].upgrade = msg2 + read; - goto test; - } - - if (read != msg2len) { - print_error(msg2, read); - abort(); - } - - read = parse(NULL, 0); - - if (read != 0) { - print_error(message->raw, read); - abort(); - } - - test: - - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); - abort(); - } - - if(!message_eq(0, message)) abort(); - - parser_free(); - } -} - -void -test_message_count_body (const struct message *message) -{ - parser_init(message->type); - - size_t read; - size_t l = strlen(message->raw); - size_t i, 
toread; - size_t chunk = 4024; - - for (i = 0; i < l; i+= chunk) { - toread = MIN(l-i, chunk); - read = parse_count_body(message->raw + i, toread); - if (read != toread) { - print_error(message->raw, read); - abort(); - } - } - - - read = parse_count_body(NULL, 0); - if (read != 0) { - print_error(message->raw, read); - abort(); - } - - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", message->name); - abort(); - } - - if(!message_eq(0, message)) abort(); - - parser_free(); -} - -void -test_simple (const char *buf, enum http_errno err_expected) -{ - parser_init(HTTP_REQUEST); - - enum http_errno err; - - parse(buf, strlen(buf)); - err = HTTP_PARSER_ERRNO(parser); - parse(NULL, 0); - - parser_free(); - - /* In strict mode, allow us to pass with an unexpected HPE_STRICT as - * long as the caller isn't expecting success. - */ -#if HTTP_PARSER_STRICT - if (err_expected != err && err_expected != HPE_OK && err != HPE_STRICT) { -#else - if (err_expected != err) { -#endif - fprintf(stderr, "\n*** test_simple expected %s, but saw %s ***\n\n%s\n", - http_errno_name(err_expected), http_errno_name(err), buf); - abort(); - } -} - -void -test_header_overflow_error (int req) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - const char *buf; - buf = req ? 
"GET / HTTP/1.1\r\n" : "HTTP/1.0 200 OK\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - buf = "header-key: header-value\r\n"; - size_t buflen = strlen(buf); - - int i; - for (i = 0; i < 10000; i++) { - parsed = http_parser_execute(&parser, &settings_null, buf, buflen); - if (parsed != buflen) { - //fprintf(stderr, "error found on iter %d\n", i); - assert(HTTP_PARSER_ERRNO(&parser) == HPE_HEADER_OVERFLOW); - return; - } - } - - fprintf(stderr, "\n*** Error expected but none in header overflow test ***\n"); - abort(); -} - - -void -test_header_nread_value () -{ - http_parser parser; - http_parser_init(&parser, HTTP_REQUEST); - size_t parsed; - const char *buf; - buf = "GET / HTTP/1.1\r\nheader: value\nhdr: value\r\n"; - parsed = http_parser_execute(&parser, &settings_null, buf, strlen(buf)); - assert(parsed == strlen(buf)); - - assert(parser.nread == strlen(buf)); -} - - -static void -test_content_length_overflow (const char *buf, size_t buflen, int expect_ok) -{ - http_parser parser; - http_parser_init(&parser, HTTP_RESPONSE); - http_parser_execute(&parser, &settings_null, buf, buflen); - - if (expect_ok) - assert(HTTP_PARSER_ERRNO(&parser) == HPE_OK); - else - assert(HTTP_PARSER_ERRNO(&parser) == HPE_INVALID_CONTENT_LENGTH); -} - -void -test_header_content_length_overflow_error (void) -{ -#define X(size) \ - "HTTP/1.1 200 OK\r\n" \ - "Content-Length: " #size "\r\n" \ - "\r\n" - const char a[] = X(1844674407370955160); /* 2^64 / 10 - 1 */ - const char b[] = X(18446744073709551615); /* 2^64-1 */ - const char c[] = X(18446744073709551616); /* 2^64 */ -#undef X - test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ - test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ - test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ -} - -void -test_chunk_content_length_overflow_error (void) -{ -#define X(size) \ - "HTTP/1.1 200 OK\r\n" \ - 
"Transfer-Encoding: chunked\r\n" \ - "\r\n" \ - #size "\r\n" \ - "..." - const char a[] = X(FFFFFFFFFFFFFFE); /* 2^64 / 16 - 1 */ - const char b[] = X(FFFFFFFFFFFFFFFF); /* 2^64-1 */ - const char c[] = X(10000000000000000); /* 2^64 */ -#undef X - test_content_length_overflow(a, sizeof(a) - 1, 1); /* expect ok */ - test_content_length_overflow(b, sizeof(b) - 1, 0); /* expect failure */ - test_content_length_overflow(c, sizeof(c) - 1, 0); /* expect failure */ -} - -void -test_no_overflow_long_body (int req, size_t length) -{ - http_parser parser; - http_parser_init(&parser, req ? HTTP_REQUEST : HTTP_RESPONSE); - size_t parsed; - size_t i; - char buf1[3000]; - size_t buf1len = sprintf(buf1, "%s\r\nConnection: Keep-Alive\r\nContent-Length: %lu\r\n\r\n", - req ? "POST / HTTP/1.0" : "HTTP/1.0 200 OK", (unsigned long)length); - parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); - if (parsed != buf1len) - goto err; - - for (i = 0; i < length; i++) { - char foo = 'a'; - parsed = http_parser_execute(&parser, &settings_null, &foo, 1); - if (parsed != 1) - goto err; - } - - parsed = http_parser_execute(&parser, &settings_null, buf1, buf1len); - if (parsed != buf1len) goto err; - return; - - err: - fprintf(stderr, - "\n*** error in test_no_overflow_long_body %s of length %lu ***\n", - req ? 
"REQUEST" : "RESPONSE", - (unsigned long)length); - abort(); -} - -void -test_multiple3 (const struct message *r1, const struct message *r2, const struct message *r3) -{ - int message_count = count_parsed_messages(3, r1, r2, r3); - - char total[ strlen(r1->raw) - + strlen(r2->raw) - + strlen(r3->raw) - + 1 - ]; - total[0] = '\0'; - - strcat(total, r1->raw); - strcat(total, r2->raw); - strcat(total, r3->raw); - - parser_init(r1->type); - - size_t read; - - read = parse(total, strlen(total)); - - if (parser->upgrade) { - upgrade_message_fix(total, read, 3, r1, r2, r3); - goto test; - } - - if (read != strlen(total)) { - print_error(total, read); - abort(); - } - - read = parse(NULL, 0); - - if (read != 0) { - print_error(total, read); - abort(); - } - -test: - - if (message_count != num_messages) { - fprintf(stderr, "\n\n*** Parser didn't see 3 messages only %d *** \n", num_messages); - abort(); - } - - if (!message_eq(0, r1)) abort(); - if (message_count > 1 && !message_eq(1, r2)) abort(); - if (message_count > 2 && !message_eq(2, r3)) abort(); - - parser_free(); -} - -/* SCAN through every possible breaking to make sure the - * parser can handle getting the content in any chunks that - * might come from the socket - */ -void -test_scan (const struct message *r1, const struct message *r2, const struct message *r3) -{ - char total[80*1024] = "\0"; - char buf1[80*1024] = "\0"; - char buf2[80*1024] = "\0"; - char buf3[80*1024] = "\0"; - - strcat(total, r1->raw); - strcat(total, r2->raw); - strcat(total, r3->raw); - - size_t read; - - int total_len = strlen(total); - - int total_ops = 2 * (total_len - 1) * (total_len - 2) / 2; - int ops = 0 ; - - size_t buf1_len, buf2_len, buf3_len; - int message_count = count_parsed_messages(3, r1, r2, r3); - - int i,j,type_both; - for (type_both = 0; type_both < 2; type_both ++ ) { - for (j = 2; j < total_len; j ++ ) { - for (i = 1; i < j; i ++ ) { - - if (ops % 1000 == 0) { - printf("\b\b\b\b%3.0f%%", 100 * (float)ops 
/(float)total_ops); - fflush(stdout); - } - ops += 1; - - parser_init(type_both ? HTTP_BOTH : r1->type); - - buf1_len = i; - strlncpy(buf1, sizeof(buf1), total, buf1_len); - buf1[buf1_len] = 0; - - buf2_len = j - i; - strlncpy(buf2, sizeof(buf1), total+i, buf2_len); - buf2[buf2_len] = 0; - - buf3_len = total_len - j; - strlncpy(buf3, sizeof(buf1), total+j, buf3_len); - buf3[buf3_len] = 0; - - read = parse(buf1, buf1_len); - - if (parser->upgrade) goto test; - - if (read != buf1_len) { - print_error(buf1, read); - goto error; - } - - read += parse(buf2, buf2_len); - - if (parser->upgrade) goto test; - - if (read != buf1_len + buf2_len) { - print_error(buf2, read); - goto error; - } - - read += parse(buf3, buf3_len); - - if (parser->upgrade) goto test; - - if (read != buf1_len + buf2_len + buf3_len) { - print_error(buf3, read); - goto error; - } - - parse(NULL, 0); - -test: - if (parser->upgrade) { - upgrade_message_fix(total, read, 3, r1, r2, r3); - } - - if (message_count != num_messages) { - fprintf(stderr, "\n\nParser didn't see %d messages only %d\n", - message_count, num_messages); - goto error; - } - - if (!message_eq(0, r1)) { - fprintf(stderr, "\n\nError matching messages[0] in test_scan.\n"); - goto error; - } - - if (message_count > 1 && !message_eq(1, r2)) { - fprintf(stderr, "\n\nError matching messages[1] in test_scan.\n"); - goto error; - } - - if (message_count > 2 && !message_eq(2, r3)) { - fprintf(stderr, "\n\nError matching messages[2] in test_scan.\n"); - goto error; - } - - parser_free(); - } - } - } - puts("\b\b\b\b100%"); - return; - - error: - fprintf(stderr, "i=%d j=%d\n", i, j); - fprintf(stderr, "buf1 (%u) %s\n\n", (unsigned int)buf1_len, buf1); - fprintf(stderr, "buf2 (%u) %s\n\n", (unsigned int)buf2_len , buf2); - fprintf(stderr, "buf3 (%u) %s\n", (unsigned int)buf3_len, buf3); - abort(); -} - -// user required to free the result -// string terminated by \0 -char * -create_large_chunked_message (int body_size_in_kb, const char* headers) 
-{ - int i; - size_t wrote = 0; - size_t headers_len = strlen(headers); - size_t bufsize = headers_len + (5+1024+2)*body_size_in_kb + 6; - char * buf = malloc(bufsize); - - memcpy(buf, headers, headers_len); - wrote += headers_len; - - for (i = 0; i < body_size_in_kb; i++) { - // write 1kb chunk into the body. - memcpy(buf + wrote, "400\r\n", 5); - wrote += 5; - memset(buf + wrote, 'C', 1024); - wrote += 1024; - strcpy(buf + wrote, "\r\n"); - wrote += 2; - } - - memcpy(buf + wrote, "0\r\n\r\n", 6); - wrote += 6; - assert(wrote == bufsize); - - return buf; -} - -/* Verify that we can pause parsing at any of the bytes in the - * message and still get the result that we're expecting. */ -void -test_message_pause (const struct message *msg) -{ - char *buf = (char*) msg->raw; - size_t buflen = strlen(msg->raw); - size_t nread; - - parser_init(msg->type); - - do { - nread = parse_pause(buf, buflen); - - // We can only set the upgrade buffer once we've gotten our message - // completion callback. 
- if (messages[0].message_complete_cb_called && - msg->upgrade && - parser->upgrade) { - messages[0].upgrade = buf + nread; - goto test; - } - - if (nread < buflen) { - - // Not much do to if we failed a strict-mode check - if (HTTP_PARSER_ERRNO(parser) == HPE_STRICT) { - parser_free(); - return; - } - - assert (HTTP_PARSER_ERRNO(parser) == HPE_PAUSED); - } - - buf += nread; - buflen -= nread; - http_parser_pause(parser, 0); - } while (buflen > 0); - - nread = parse_pause(NULL, 0); - assert (nread == 0); - -test: - if (num_messages != 1) { - printf("\n*** num_messages != 1 after testing '%s' ***\n\n", msg->name); - abort(); - } - - if(!message_eq(0, msg)) abort(); - - parser_free(); -} - -int -main (void) -{ - parser = NULL; - int i, j, k; - int request_count; - int response_count; - unsigned long version; - unsigned major; - unsigned minor; - unsigned patch; - - version = http_parser_version(); - major = (version >> 16) & 255; - minor = (version >> 8) & 255; - patch = version & 255; - printf("http_parser v%u.%u.%u (0x%06lx)\n", major, minor, patch, version); - - printf("sizeof(http_parser) = %u\n", (unsigned int)sizeof(http_parser)); - - for (request_count = 0; requests[request_count].name; request_count++); - for (response_count = 0; responses[response_count].name; response_count++); - - //// API - test_preserve_data(); - test_parse_url(); - test_method_str(); - - //// NREAD - test_header_nread_value(); - - //// OVERFLOW CONDITIONS - - test_header_overflow_error(HTTP_REQUEST); - test_no_overflow_long_body(HTTP_REQUEST, 1000); - test_no_overflow_long_body(HTTP_REQUEST, 100000); - - test_header_overflow_error(HTTP_RESPONSE); - test_no_overflow_long_body(HTTP_RESPONSE, 1000); - test_no_overflow_long_body(HTTP_RESPONSE, 100000); - - test_header_content_length_overflow_error(); - test_chunk_content_length_overflow_error(); - - //// RESPONSES - - for (i = 0; i < response_count; i++) { - test_message(&responses[i]); - } - - for (i = 0; i < response_count; i++) { - 
test_message_pause(&responses[i]); - } - - for (i = 0; i < response_count; i++) { - if (!responses[i].should_keep_alive) continue; - for (j = 0; j < response_count; j++) { - if (!responses[j].should_keep_alive) continue; - for (k = 0; k < response_count; k++) { - test_multiple3(&responses[i], &responses[j], &responses[k]); - } - } - } - - test_message_count_body(&responses[NO_HEADERS_NO_BODY_404]); - test_message_count_body(&responses[TRAILING_SPACE_ON_CHUNKED_BODY]); - - // test very large chunked response - { - char * msg = create_large_chunked_message(31337, - "HTTP/1.0 200 OK\r\n" - "Transfer-Encoding: chunked\r\n" - "Content-Type: text/plain\r\n" - "\r\n"); - struct message large_chunked = - {.name= "large chunked" - ,.type= HTTP_RESPONSE - ,.raw= msg - ,.should_keep_alive= FALSE - ,.message_complete_on_eof= FALSE - ,.http_major= 1 - ,.http_minor= 0 - ,.status_code= 200 - ,.response_status= "OK" - ,.num_headers= 2 - ,.headers= - { { "Transfer-Encoding", "chunked" } - , { "Content-Type", "text/plain" } - } - ,.body_size= 31337*1024 - ,.num_chunks_complete= 31338 - }; - for (i = 0; i < MAX_CHUNKS; i++) { - large_chunked.chunk_lengths[i] = 1024; - } - test_message_count_body(&large_chunked); - free(msg); - } - - - - printf("response scan 1/2 "); - test_scan( &responses[TRAILING_SPACE_ON_CHUNKED_BODY] - , &responses[NO_BODY_HTTP10_KA_204] - , &responses[NO_REASON_PHRASE] - ); - - printf("response scan 2/2 "); - test_scan( &responses[BONJOUR_MADAME_FR] - , &responses[UNDERSTORE_HEADER_KEY] - , &responses[NO_CARRIAGE_RET] - ); - - puts("responses okay"); - - - /// REQUESTS - - test_simple("GET / HTP/1.1\r\n\r\n", HPE_INVALID_VERSION); - - // Well-formed but incomplete - test_simple("GET / HTTP/1.1\r\n" - "Content-Type: text/plain\r\n" - "Content-Length: 6\r\n" - "\r\n" - "fooba", - HPE_OK); - - static const char *all_methods[] = { - "DELETE", - "GET", - "HEAD", - "POST", - "PUT", - //"CONNECT", //CONNECT can't be tested like other methods, it's a tunnel - "OPTIONS", 
- "TRACE", - "COPY", - "LOCK", - "MKCOL", - "MOVE", - "PROPFIND", - "PROPPATCH", - "UNLOCK", - "REPORT", - "MKACTIVITY", - "CHECKOUT", - "MERGE", - "M-SEARCH", - "NOTIFY", - "SUBSCRIBE", - "UNSUBSCRIBE", - "PATCH", - 0 }; - const char **this_method; - for (this_method = all_methods; *this_method; this_method++) { - char buf[200]; - sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); - test_simple(buf, HPE_OK); - } - - static const char *bad_methods[] = { - "ASDF", - "C******", - "COLA", - "GEM", - "GETA", - "M****", - "MKCOLA", - "PROPPATCHA", - "PUN", - "PX", - "SA", - "hello world", - 0 }; - for (this_method = bad_methods; *this_method; this_method++) { - char buf[200]; - sprintf(buf, "%s / HTTP/1.1\r\n\r\n", *this_method); - test_simple(buf, HPE_INVALID_METHOD); - } - - // illegal header field name line folding - test_simple("GET / HTTP/1.1\r\n" - "name\r\n" - " : value\r\n" - "\r\n", - HPE_INVALID_HEADER_TOKEN); - - const char *dumbfuck2 = - "GET / HTTP/1.1\r\n" - "X-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n" - "\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n" - "\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n" - "\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n" - "\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n" - "\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n" - "\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n" - "\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n" - "\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n" - "\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n" - "\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n" - "\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n" - "\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n" - 
"\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n" - "\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n" - "\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n" - "\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n" - "\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n" - "\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n" - "\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n" - "\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n" - "\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n" - "\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n" - "\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n" - "\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n" - "\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n" - "\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n" - "\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n" - "\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n" - "\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n" - "\tRA==\r\n" - "\t-----END CERTIFICATE-----\r\n" - "\r\n"; - test_simple(dumbfuck2, HPE_OK); - - const char *corrupted_connection = - "GET / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "Connection\r\033\065\325eep-Alive\r\n" - "Accept-Encoding: gzip\r\n" - "\r\n"; - test_simple(corrupted_connection, HPE_INVALID_HEADER_TOKEN); - - const char *corrupted_header_name = - "GET / HTTP/1.1\r\n" - "Host: www.example.com\r\n" - "X-Some-Header\r\033\065\325eep-Alive\r\n" - "Accept-Encoding: gzip\r\n" - "\r\n"; - test_simple(corrupted_header_name, HPE_INVALID_HEADER_TOKEN); - -#if 0 - // NOTE(Wed Nov 18 11:57:27 CET 2009) this seems okay. we just read body - // until EOF. 
- // - // no content-length - // error if there is a body without content length - const char *bad_get_no_headers_no_body = "GET /bad_get_no_headers_no_body/world HTTP/1.1\r\n" - "Accept: */*\r\n" - "\r\n" - "HELLO"; - test_simple(bad_get_no_headers_no_body, 0); -#endif - /* TODO sending junk and large headers gets rejected */ - - - /* check to make sure our predefined requests are okay */ - for (i = 0; requests[i].name; i++) { - test_message(&requests[i]); - } - - for (i = 0; i < request_count; i++) { - test_message_pause(&requests[i]); - } - - for (i = 0; i < request_count; i++) { - if (!requests[i].should_keep_alive) continue; - for (j = 0; j < request_count; j++) { - if (!requests[j].should_keep_alive) continue; - for (k = 0; k < request_count; k++) { - test_multiple3(&requests[i], &requests[j], &requests[k]); - } - } - } - - printf("request scan 1/4 "); - test_scan( &requests[GET_NO_HEADERS_NO_BODY] - , &requests[GET_ONE_HEADER_NO_BODY] - , &requests[GET_NO_HEADERS_NO_BODY] - ); - - printf("request scan 2/4 "); - test_scan( &requests[POST_CHUNKED_ALL_YOUR_BASE] - , &requests[POST_IDENTITY_BODY_WORLD] - , &requests[GET_FUNKY_CONTENT_LENGTH] - ); - - printf("request scan 3/4 "); - test_scan( &requests[TWO_CHUNKS_MULT_ZERO_END] - , &requests[CHUNKED_W_TRAILING_HEADERS] - , &requests[CHUNKED_W_BULLSHIT_AFTER_LENGTH] - ); - - printf("request scan 4/4 "); - test_scan( &requests[QUERY_URL_WITH_QUESTION_MARK_GET] - , &requests[PREFIX_NEWLINE_GET ] - , &requests[CONNECT_REQUEST] - ); - - puts("requests okay"); - - return 0; -} diff --git a/doc/api/http.markdown b/doc/api/http.markdown index 5cf3b07a9cd778..8a9bb3f53031a7 100644 --- a/doc/api/http.markdown +++ b/doc/api/http.markdown @@ -43,12 +43,6 @@ list like the following: 'Host', 'mysite.com', 'accepT', '*/*' ] -## http.METHODS - -* {Array} - -A list of the HTTP methods that are supported by the parser. 
- ## http.STATUS_CODES * {Object} diff --git a/doc/api/process.markdown b/doc/api/process.markdown index ebff3d6496a644..f2586843f03183 100644 --- a/doc/api/process.markdown +++ b/doc/api/process.markdown @@ -632,8 +632,7 @@ A property exposing version strings of io.js and its dependencies. Will print something like: - { http_parser: '2.3.0', - node: '1.1.1', + { node: '1.1.1', v8: '4.1.0.14', uv: '1.3.0', zlib: '1.2.8', @@ -660,7 +659,6 @@ An example of the possible output looks like: node_install_npm: 'true', node_prefix: '', node_shared_cares: 'false', - node_shared_http_parser: 'false', node_shared_libuv: 'false', node_shared_zlib: 'false', node_use_dtrace: 'false', diff --git a/lib/_http_client.js b/lib/_http_client.js index a7d714f7e0b0b2..b047107a5513c5 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -4,7 +4,7 @@ const util = require('util'); const net = require('net'); const url = require('url'); const EventEmitter = require('events').EventEmitter; -const HTTPParser = process.binding('http_parser').HTTPParser; +const HTTPParser = require('_http_parser'); const assert = require('assert').ok; const common = require('_http_common'); const httpSocketSetup = common.httpSocketSetup; diff --git a/lib/_http_common.js b/lib/_http_common.js index 757032929444b1..62d568bce1c8b3 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -1,7 +1,7 @@ 'use strict'; const FreeList = require('internal/freelist').FreeList; -const HTTPParser = process.binding('http_parser').HTTPParser; +const HTTPParser = require('_http_parser'); const incoming = require('_http_incoming'); const IncomingMessage = incoming.IncomingMessage; @@ -14,26 +14,6 @@ exports.debug = debug; exports.CRLF = '\r\n'; exports.chunkExpression = /chunk/i; exports.continueExpression = /100-continue/i; -exports.methods = HTTPParser.methods; - -const kOnHeaders = HTTPParser.kOnHeaders | 0; -const kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; -const kOnBody = HTTPParser.kOnBody | 0; 
-const kOnMessageComplete = HTTPParser.kOnMessageComplete | 0; - -// Only called in the slow case where slow means -// that the request headers were either fragmented -// across multiple TCP packets or too large to be -// processed in a single run. This method is also -// called to process trailing HTTP headers. -function parserOnHeaders(headers, url) { - // Once we exceeded headers limit - stop collecting them - if (this.maxHeaderPairs <= 0 || - this._headers.length < this.maxHeaderPairs) { - this._headers = this._headers.concat(headers); - } - this._url += url; -} // `headers` and `url` are set only if .onHeaders() has not been called for // this request. @@ -43,22 +23,11 @@ function parserOnHeadersComplete(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { var parser = this; - - if (!headers) { - headers = parser._headers; - parser._headers = []; - } - - if (!url) { - url = parser._url; - parser._url = ''; - } - - parser.incoming = new IncomingMessage(parser.socket); - parser.incoming.httpVersionMajor = versionMajor; - parser.incoming.httpVersionMinor = versionMinor; - parser.incoming.httpVersion = versionMajor + '.' + versionMinor; - parser.incoming.url = url; + var stream = parser.incoming = new IncomingMessage(parser.socket); + stream.httpVersionMajor = versionMajor; + stream.httpVersionMinor = versionMinor; + stream.httpVersion = versionMajor + '.' 
+ versionMinor; + stream.url = url; var n = headers.length; @@ -66,18 +35,18 @@ function parserOnHeadersComplete(versionMajor, versionMinor, headers, method, if (parser.maxHeaderPairs > 0) n = Math.min(n, parser.maxHeaderPairs); - parser.incoming._addHeaderLines(headers, n); + stream._addHeaderLines(headers, n); - if (typeof method === 'number') { + if (typeof method === 'string') { // server only - parser.incoming.method = HTTPParser.methods[method]; + stream.method = method; } else { // client only - parser.incoming.statusCode = statusCode; - parser.incoming.statusMessage = statusMessage; + stream.statusCode = statusCode; + stream.statusMessage = statusMessage; } - parser.incoming.upgrade = upgrade; + stream.upgrade = upgrade; var skipBody = false; // response to HEAD or CONNECT @@ -85,7 +54,7 @@ function parserOnHeadersComplete(versionMajor, versionMinor, headers, method, // For upgraded connections and CONNECT method request, we'll emit this // after parser.execute so that we can capture the first part of the new // protocol. - skipBody = parser.onIncoming(parser.incoming, shouldKeepAlive); + skipBody = parser.onIncoming(stream, shouldKeepAlive); } return skipBody; @@ -118,14 +87,11 @@ function parserOnMessageComplete() { if (stream) { stream.complete = true; - // Emit any trailing headers. - var headers = parser._headers; - if (headers) { - parser.incoming._addHeaderLines(headers, headers.length); - parser._headers = []; - parser._url = ''; - } - + // Add any trailing headers. 
+ var headers = parser.headers; + var headerscnt = headers.length; + if (headerscnt > 0) + stream._addHeaderLines(headers, headerscnt); // For emit end event stream.push(null); } @@ -138,18 +104,9 @@ function parserOnMessageComplete() { var parsers = new FreeList('parsers', 1000, function() { var parser = new HTTPParser(HTTPParser.REQUEST); - parser._headers = []; - parser._url = ''; - - // Only called in the slow case where slow means - // that the request headers were either fragmented - // across multiple TCP packets or too large to be - // processed in a single run. This method is also - // called to process trailing HTTP headers. - parser[kOnHeaders] = parserOnHeaders; - parser[kOnHeadersComplete] = parserOnHeadersComplete; - parser[kOnBody] = parserOnBody; - parser[kOnMessageComplete] = parserOnMessageComplete; + parser.onHeaders = parserOnHeadersComplete; + parser.onBody = parserOnBody; + parser.onComplete = parserOnMessageComplete; return parser; }); @@ -165,7 +122,6 @@ exports.parsers = parsers; // should be all that is needed. function freeParser(parser, req, socket) { if (parser) { - parser._headers = []; parser.onIncoming = null; if (parser.socket) parser.socket.parser = null; diff --git a/lib/_http_parser.js b/lib/_http_parser.js new file mode 100644 index 00000000000000..452e580c94bdee --- /dev/null +++ b/lib/_http_parser.js @@ -0,0 +1,907 @@ +/* +Misc differences with joyent/http-parser: + * Folding whitespace behavior is conformant with RFC 7230: + + "A user agent that receives an obs-fold in a response message that is + not within a message/http container MUST replace each received + obs-fold with one or more SP octets prior to interpreting the field + value." + + It should also be noted that RFC 7230 now deprecates line folding for HTTP + parsing, FWIW. This parser replaces folds with a single SP octet. 
+ + * Optional whitespace is removed before interpreting a header field value, as + suggested by RFC 7230: + + "A field value might be preceded and/or followed by optional + whitespace (OWS); a single SP preceding the field-value is preferred + for consistent readability by humans. The field value does not + include any leading or trailing whitespace: OWS occurring before the + first non-whitespace octet of the field value or after the last + non-whitespace octet of the field value ought to be excluded by + parsers when extracting the field value from a header field." + + joyent/http-parser keeps trailing whitespace. This parser keeps neither + preceding nor trailing whitespace. + + * Does not allow spaces (which are invalid) in header field names. + + * Smaller maximum chunk/content length (2^53-1 vs 2^64-2). Obviously it's + not *impossible* to handle a full 64-bit length, but it would mean adding + some kind of "big integer" library for lengths > 2^53-1. + + * No special handling for `Proxy-Connection` header. The reason for this is + that `Proxy-Connection` was an experimental header for HTTP 1.0 user agents + that ended up being a bad idea because of the confusion it can bring. You + can read a bit more about this in + [RFC 7230 A.1.2](https://tools.ietf.org/html/rfc7230#page-79) +*/ +'use strict'; + +var inspect = require('util').inspect; + +var REQUEST = HTTPParser.REQUEST = 0; +var RESPONSE = HTTPParser.RESPONSE = 1; + +var CR = 13; +var LF = 10; + +var MAX_CHUNK_SIZE = Number.MAX_SAFE_INTEGER; // 9007199254740991 + +// RFC 7230 recommends HTTP implementations support at least 8000 bytes for +// the request line. We use 8190 by default, the same as Apache. +HTTPParser.MAX_REQ_LINE = 8190; +// RFC 7230 does not have any recommendations for minimum response line length +// support. Judging by the (current) longest standard status reason text, the +// typical response line will be 44 bytes or less (not including (CR)LF). 
Since +// the reason text field is free form though, we will roughly triple that +// amount for the default. +HTTPParser.MAX_RES_LINE = 128; + +// This total limit for start line + all headers was copied from +// joyent/http-parser. +var MAX_HEADER_BYTES = 80 * 1024; + +var RE_CONN_CLOSE = /(?:^|[\t ,]+)close(?:\r?$|[\t ,]+)/i; +var RE_CONN_KEEPALIVE = /(?:^|[\t ,]+)keep\-alive(?:\r?$|[\t ,]+)/i; +var RE_CONN_UPGRADE = /(?:^|[\t ,]+)upgrade(?:\r?$|[\t ,]+)/i; +var RE_TE_CHUNKED = /(?:^|[\t ,]+)chunked(?:\r?$|[\t ,]+)/i; +var CC_CONNECT = 'connect'.split('').map(getFirstCharCode); +var CC_UPGRADE = 'upgrade'.split('').map(getFirstCharCode); +var CC_CONNECTION = 'connection'.split('').map(getFirstCharCode); +var RE_CONTLEN = /Content\-Length/i; +var RE_TE = /Transfer\-Encoding/i; +var REQ_HTTP_VER_BYTES = ' HTTP/1.'.split('').map(getFirstCharCode); +var RES_HTTP_VER_BYTES = REQ_HTTP_VER_BYTES.slice(1); +REQ_HTTP_VER_BYTES.reverse(); + +var STATE_REQ_LINE = 0; +var STATE_STATUS_LINE = 1; +var STATE_HEADER = 2; +var STATE_BODY_LITERAL = 3; +var STATE_BODY_EOF = 4; +var STATE_BODY_CHUNKED_SIZE = 5; +var STATE_BODY_CHUNKED_SIZE_IGNORE = 6; +var STATE_BODY_CHUNKED_BYTES = 7; +var STATE_BODY_CHUNKED_BYTES_LF = 8; +var STATE_COMPLETE = 9; + +var FLAG_CHUNKED = 1 << 0; +var FLAG_CONNECTION_KEEP_ALIVE = 1 << 1; +var FLAG_CONNECTION_CLOSE = 1 << 2; +var FLAG_CONNECTION_UPGRADE = 1 << 3; +var FLAG_TRAILING = 1 << 4; +var FLAG_UPGRADE = 1 << 5; +var FLAG_SKIPBODY = 1 << 6; +var FLAG_SHOULD_KEEP_ALIVE = 1 << 7; +var FLAG_CONNECT_METHOD = 1 << 8; +var FLAG_ANY_UPGRADE = FLAG_UPGRADE | FLAG_CONNECTION_UPGRADE; + +function HTTPParser(type) { + if (type === RESPONSE) { + this.type = type; + this._state = STATE_STATUS_LINE; + } + this.headers = []; +} +HTTPParser.prototype.type = REQUEST; +HTTPParser.prototype._state = STATE_REQ_LINE; +HTTPParser.prototype._err = null; +HTTPParser.prototype._flags = 0; +HTTPParser.prototype._contentLen = null; +HTTPParser.prototype._nbytes = 
null; +HTTPParser.prototype._nhdrbytes = 0; +HTTPParser.prototype._nhdrpairs = 0; +HTTPParser.prototype._buf = ''; +HTTPParser.prototype.httpMajor = 1; +HTTPParser.prototype.httpMinor = null; +HTTPParser.prototype.maxHeaderPairs = 2000; +HTTPParser.prototype.method = null; +HTTPParser.prototype.url = null; +HTTPParser.prototype.statusCode = null; +HTTPParser.prototype.statusText = null; + +HTTPParser.prototype.onHeaders = null; +HTTPParser.prototype.onBody = null; +HTTPParser.prototype.onComplete = null; + +HTTPParser.prototype.onIncoming = null; +HTTPParser.prototype.incoming = null; +HTTPParser.prototype.socket = null; + +HTTPParser.prototype.reinitialize = function(type) { + this.execute = this._executeStartLine; + this.type = type; + if (type === REQUEST) + this._state = STATE_REQ_LINE; + else + this._state = STATE_STATUS_LINE; + + this._err = null; + this._flags = 0; + this._contentLen = null; + this._nbytes = null; + this._nhdrbytes = 0; + this._nhdrpairs = 0; + this._buf = ''; + + // Common properties + this.headers = []; + this.httpMajor = 1; + this.httpMinor = null; + this.maxHeaderPairs = 2000; + + // Request properties + this.method = null; + this.url = null; + + // Response properties + this.statusCode = null; + this.statusText = null; +}; +HTTPParser.prototype.finish = function() { + var state = this._state; + if (state === STATE_BODY_EOF) { + this.execute = this._executeBodyIgnore; + this._state = STATE_COMPLETE; + this.onComplete && this.onComplete(); + } else if (this.execute !== this._executeError && + state !== STATE_REQ_LINE && + state !== STATE_STATUS_LINE && + state !== STATE_COMPLETE) { + return this._setError('Invalid EOF state'); + } +}; +HTTPParser.prototype.close = function() {}; +HTTPParser.prototype.pause = function() {}; +HTTPParser.prototype.resume = function() {}; +HTTPParser.prototype._setError = function(msg) { + var err = new Error(msg); + this.execute = this._executeError; + this._err = err; + return err; +}; 
+HTTPParser.prototype._headersEnd = function() { + var flags = this._flags; + var type = this.type; + var method = this.method; + var upgrade = ((flags & FLAG_ANY_UPGRADE) === FLAG_ANY_UPGRADE || + (flags & FLAG_CONNECT_METHOD) > 0); + var contentLen = this._contentLen; + var httpMajor = this.httpMajor; + var httpMinor = this.httpMinor; + var statusCode = this.statusCode; + var keepalive = this._shouldKeepAlive(httpMajor, httpMinor, flags, type, + statusCode); + var ret; + + this._buf = ''; + this._nbytes = null; + + if ((flags & FLAG_CHUNKED) > 0) { + this._state = STATE_BODY_CHUNKED_SIZE; + this.execute = this._executeBodyChunked; + } else if (contentLen !== null) { + this._state = STATE_BODY_LITERAL; + this.execute = this._executeBodyLiteral; + if (keepalive) + this._flags |= FLAG_SHOULD_KEEP_ALIVE; + } else { + this._state = STATE_BODY_EOF; + this.execute = this._executeBodyEOF; + } + + var headers = this.headers; + var headerslen = headers.length; + if ((flags & FLAG_TRAILING) > 0) { + if (headerslen > 0) + headers[headerslen - 1] = trim(headers[headerslen - 1]); + this.onComplete && this.onComplete(); + if (!keepalive) + this._state = STATE_COMPLETE; + else + this.reinitialize(type); + return; + } else { + this.headers = []; + if (this.onHeaders) { + if (headerslen > 0) + headers[headerslen - 1] = trim(headers[headerslen - 1]); + ret = this.onHeaders(httpMajor, httpMinor, headers, method, + this.url, statusCode, this.statusText, upgrade, + keepalive); + if (ret === true) + flags = (this._flags |= FLAG_SKIPBODY); + } + } + + if (upgrade) { + this.onComplete && this.onComplete(); + this._state = STATE_COMPLETE; + } else if (contentLen === 0 || + (flags & FLAG_SKIPBODY) > 0 || + ((flags & FLAG_CHUNKED) === 0 && + contentLen === null && + !this._needsEOF(flags, type, statusCode) + )) { + this.onComplete && this.onComplete(); + this.reinitialize(type); + } +}; +HTTPParser.prototype._executeStartLine = function(data) { + if (data.length === 0) + return 0; + var 
firstByte = data[0]; + if ((firstByte < 32 || firstByte >= 127) && firstByte !== CR && + firstByte !== LF) { + return this._setError('Invalid byte in start line'); + } + this.execute = this._executeHeader; + return this.execute(data); +}; +HTTPParser.prototype._executeHeader = function(data) { + var len = data.length; + + if (len === 0) + return 0; + + var offset = 0; + var buf = this._buf; + var nhdrbytes = this._nhdrbytes; + var state = this._state; + var headers = this.headers; + var headerslen = headers.length; + var maxHeaderPairs = this.maxHeaderPairs; + var ret; + + while (offset < len) { + ret = indexOfLF(data, len, offset); + if (ret > -1) { + // Our internal buffer contains a full line + var bytesToAdd = ret - offset; + if (bytesToAdd > 0) { + nhdrbytes += bytesToAdd; + if (state === STATE_REQ_LINE && nhdrbytes > HTTPParser.MAX_REQ_LINE) + return this._setError('Request line limit exceeded'); + else if (state === STATE_STATUS_LINE && + nhdrbytes > HTTPParser.MAX_RES_LINE) { + return this._setError('Response line limit exceeded'); + } else if (nhdrbytes > MAX_HEADER_BYTES) { + return this._setError('Header limit exceeded'); + } + buf += data.toString('binary', offset, ret); + } + + offset = ret + 1; + var buflen = buf.length; + + switch (state) { + case STATE_HEADER: + if (buflen === 0 || buf.charCodeAt(0) === CR) { + // We saw a double line ending + this._headersEnd(); + state = this._state; + if (state < STATE_COMPLETE && offset < len) { + // Execute extra body bytes + ret = this.execute(data.slice(offset)); + if (typeof ret !== 'number') + return ret; + return offset + ret; + } else if (state === STATE_COMPLETE) { + this.reinitialize(this.type); + } + return offset; + } + var idx = -1; + var fieldName; + var fieldValue; + var valueStart = -1; + var validFieldName = true; + for (var i = 0; i < buflen; ++i) { + var ch = buf.charCodeAt(i); + if (idx === -1) { + if (ch === 58) { // ':' + if (i === 0 || !validFieldName) + return this._setError('Malformed 
header line'); + idx = i; + } else if (validFieldName && (ch < 33 || ch > 126)) { + validFieldName = false; + } + } else if (ch !== 32 && ch !== 9) { + valueStart = i; + break; + } + } + if (idx === -1) { + var firstChr = buf.charCodeAt(0); + if (firstChr !== 32 & firstChr !== 9) + return this._setError('Malformed header line'); + // RFC 7230 compliant, but less backwards compatible: + var extra = ltrim(buf); + if (extra.length > 0) { + if (headerslen === 0) + return this._setError('Malformed header line'); + fieldName = headers[headerslen - 2]; + fieldValue = headers[headerslen - 1]; + if (fieldValue.length > 0) { + if (fieldValue.charCodeAt(fieldValue.length - 1) === CR) + fieldValue = fieldValue.slice(0, -1); + if (fieldValue.length > 0) + fieldValue += ' ' + extra; + else + fieldValue = extra; + } else { + fieldValue = extra; + } + headers[headerslen - 1] = fieldValue; + idx = fieldName.length; + } + } else { + fieldName = buf.slice(0, idx); + fieldValue = valueStart === -1 ? '' : buf.slice(valueStart); + // Ensures that trailing whitespace after the last folded line for + // header values gets trimmed + if (headerslen > 0) + headers[headerslen - 1] = rtrim(headers[headerslen - 1]); + if (maxHeaderPairs <= 0 || ++this._nhdrpairs < maxHeaderPairs) { + headers.push(fieldName, fieldValue); + headerslen += 2; + } + } + switch (idx) { + case 7: + for (var i = 0; i < 7; ++i) { + if ((fieldName.charCodeAt(i) | 0x20) !== CC_UPGRADE[i]) + break; + } + if (i === 7) + this._flags |= FLAG_UPGRADE; + break; + case 10: + for (var i = 0; i < 10; ++i) { + if ((fieldName.charCodeAt(i) | 0x20) !== CC_CONNECTION[i]) + break; + } + if (i === 10) { + if (fieldValue.search(RE_CONN_CLOSE) > -1) + this._flags |= FLAG_CONNECTION_CLOSE; + if (fieldValue.search(RE_CONN_KEEPALIVE) > -1) + this._flags |= FLAG_CONNECTION_KEEP_ALIVE; + if (fieldValue.search(RE_CONN_UPGRADE) > -1) + this._flags |= FLAG_CONNECTION_UPGRADE; + } + break; + case 14: + // Somewhere between 10 and 14 characters, 
the previously used + // for loop algorithm becomes slower than doing a search using a + // simple, case-insensitive, non-anchored regexp + if (fieldValue.length > 0 && + fieldName.search(RE_CONTLEN) === 0) { + var val = parseInt(fieldValue, 10); + if (val !== val || val > MAX_CHUNK_SIZE) + return this._setError('Bad Content-Length'); + this._contentLen = val; + } + break; + case 17: + if (fieldName.search(RE_TE) === 0 && + fieldValue.search(RE_TE_CHUNKED) > -1) { + this._flags |= FLAG_CHUNKED; + } + break; + } + break; + case STATE_REQ_LINE: + // Original HTTP parser ignored blank lines before request/status + // line, so we do that here too ... + if (buflen === 0 || buf.charCodeAt(0) === CR) + break; + + var firstSP; + var urlStart; + var urlEnd; + var minor; + var end = (buf.charCodeAt(buflen - 1) === CR ? + buflen - 3 : buflen - 2); + // Start working backwards and both validate that the line ends in + // ` HTTP/1.[01]` and find the end of the URL (in case there are + // multiple spaces/tabs separating the URL and HTTP version + var ch = buf.charCodeAt(end + 1); + if (ch === 49) + minor = 1; + else if (ch === 48) + minor = 0; + else + return this._setError('Malformed request line'); + var h = 0; + while (end >= 0) { + var ch = buf.charCodeAt(end); + if (h < 8) { + if (ch !== REQ_HTTP_VER_BYTES[h++]) + return this._setError('Malformed request line'); + } else if (ch >= 33 && ch !== 127) { + urlEnd = end + 1; + break; + } + --end; + } + if (urlEnd === undefined) + return this._setError('Malformed request line'); + + // Now start working forwards and both validate the HTTP method and + // find the start of the URL (in case there are multiple spaces/tabs + // separating the method and the URL + var isConnect = false; + var c = 0; + for (var i = 0; i < urlEnd; ++i) { + ch = buf.charCodeAt(i); + if (firstSP !== undefined) { + if (ch >= 33 && ch !== 127) { + urlStart = i; + break; + } + } else if (ch === 32) { + firstSP = i; + isConnect = (c === 7); + } else if (ch < 
33 || ch > 126) + return this._setError('Malformed request line'); + else if (c >= 0) { + if (c === 7) + c = -1; + else { + ch |= 0x20; + if (ch !== CC_CONNECT[c]) + c = -1; + else + ++c; + } + } + } + if (firstSP === undefined || + urlStart === undefined || + urlStart === urlEnd) { + return this._setError('Malformed request line'); + } + + var url = buf.slice(urlStart, urlEnd); + if (!validateUrl(url, isConnect)) + return this._setError('Malformed request line'); + + if (isConnect) + this._flags |= FLAG_CONNECT_METHOD; + this.httpMinor = minor; + this.method = buf.slice(0, firstSP); + this.url = url; + + state = STATE_HEADER; + break; + case STATE_STATUS_LINE: + // Original HTTP parser ignored blank lines before request/status + // line, so we do that here too ... + if (buflen === 0 || buf.charCodeAt(0) === CR) + break; + + // Validate HTTP version + for (var i = 0; i < 7; ++i) { + if (buf.charCodeAt(i) !== RES_HTTP_VER_BYTES[i]) + return this._setError('Malformed status line'); + } + var minor; + var status = 0; + if (buf.charCodeAt(7) === 49) + minor = 1; + else if (buf.charCodeAt(7) === 48) + minor = 0; + else + return this._setError('Malformed status line'); + if (buf.charCodeAt(8) !== 32) + return this._setError('Malformed status line'); + + // Validate status code + for (var i = 9; i < 12; ++i) { + var ch = buf.charCodeAt(i); + if (ch < 48 || ch > 57) + return this._setError('Malformed status line'); + status *= 10; + status += (ch - 48); + } + + if (buf.charCodeAt(buflen - 1) === CR) + --buflen; + this.httpMinor = minor; + this.statusCode = status; + this.statusText = (buflen > 13 ? 
buf.slice(13, buflen) : ''); + state = STATE_HEADER; + break; + default: + return this._setError('Unexpected HTTP parser state: ' + state); + } + + buf = ''; + } else { + nhdrbytes += len - offset; + if (state === STATE_REQ_LINE && nhdrbytes > HTTPParser.MAX_REQ_LINE) + return this._setError('Request line limit exceeded'); + else if (state === STATE_STATUS_LINE && + nhdrbytes > HTTPParser.MAX_RES_LINE) { + return this._setError('Response line limit exceeded'); + } else if (nhdrbytes > MAX_HEADER_BYTES) { + return this._setError('Header limit exceeded'); + } + buf += data.toString('binary', offset); + break; + } + } + + this._state = state; + this._buf = buf; + this._nhdrbytes = nhdrbytes; + + return len; +}; +HTTPParser.prototype._executeBodyChunked = function(data) { + var len = data.length; + + if (len === 0) + return 0; + + var offset = 0; + var nbytes = this._nbytes; + var state = this._state; + var dec; + + while (offset < len) { + switch (state) { + case STATE_BODY_CHUNKED_SIZE: + while (offset < len) { + var ch = data[offset]; + dec = hexValue(ch); + if (dec === undefined) { + if (nbytes === null) + return this._setError('Invalid chunk size'); + state = STATE_BODY_CHUNKED_SIZE_IGNORE; + break; + } else if (nbytes === null) + nbytes = dec; + else { + nbytes *= 16; + nbytes += dec; + } + if (nbytes > MAX_CHUNK_SIZE) + return this._setError('Chunk size limit exceeded'); + ++offset; + } + break; + case STATE_BODY_CHUNKED_BYTES: + var dataleft = len - offset; + if (dataleft >= nbytes) { + this.onBody(data, offset, nbytes); + offset += nbytes; + nbytes = 0; + state = STATE_BODY_CHUNKED_BYTES_LF; + } else { + nbytes -= dataleft; + this.onBody(data, offset, dataleft); + offset = len; + } + break; + case STATE_BODY_CHUNKED_BYTES_LF: + // We reach this state after all chunk bytes have been read and we are + // looking for the (CR)LF following the bytes + while (offset < len) { + var curByte = data[offset++]; + if (nbytes === 0) { + if (curByte === LF) { + state = 
STATE_BODY_CHUNKED_SIZE; + nbytes = null; + break; + } else if (curByte === CR) { + ++nbytes; + } + } else if (nbytes === 1 && curByte === LF) { + state = STATE_BODY_CHUNKED_SIZE; + nbytes = null; + break; + } else { + return this._setError('Malformed chunk (malformed line ending)'); + } + } + break; + case STATE_BODY_CHUNKED_SIZE_IGNORE: + // We only reach this state once we receive a non-hex character on the + // chunk size line + while (offset < len) { + if (data[offset++] === LF) { + if (nbytes === 0) { + this._flags |= FLAG_TRAILING; + this._state = STATE_HEADER; + this._nbytes = null; + this.execute = this._executeHeader; + if (offset < len) { + var ret = this.execute(data.slice(offset)); + if (typeof ret !== 'number') + return ret; + return offset + ret; + } + return offset; + } else { + state = STATE_BODY_CHUNKED_BYTES; + break; + } + } + } + break; + default: + return this._setError('Unexpected parser state while reading chunks'); + } + } + + this._state = state; + this._nbytes = nbytes; + + return len; +}; +HTTPParser.prototype._executeBodyLiteral = function(data) { + var len = data.length; + + if (len === 0) + return 0; + + var nbytes = this._contentLen; + if (len >= nbytes) { + this.onBody(data, 0, nbytes); + this.onComplete && this.onComplete(); + if ((this._flags & FLAG_SHOULD_KEEP_ALIVE) > 0) { + this.reinitialize(this.type); + if (len > nbytes) { + var ret = this.execute(data.slice(nbytes)); + if (typeof ret !== 'number') + return ret; + return nbytes + ret; + } + } else { + this._state = STATE_COMPLETE; + } + } else { + this._contentLen -= len; + this.onBody(data, 0, len); + } + return len; +}; +HTTPParser.prototype._executeBodyEOF = function(data) { + var len = data.length; + + if (len === 0) + return 0; + + this.onBody(data, 0, len); + return len; +}; +HTTPParser.prototype._executeBodyIgnore = function(data) { + return 0; +}; +HTTPParser.prototype._executeError = function(data) { + return this._err; +}; +HTTPParser.prototype.execute = 
HTTPParser.prototype._executeStartLine; +HTTPParser.prototype._shouldKeepAlive = function(httpMajor, httpMinor, flags, + type, status) { + if (httpMajor > 0 && httpMinor > 0) { + if ((flags & FLAG_CONNECTION_CLOSE) > 0) + return false; + } else { + if ((flags & FLAG_CONNECTION_KEEP_ALIVE) === 0) + return false; + } + return !this._needsEOF(flags, type, status); +}; +HTTPParser.prototype._needsEOF = function(flags, type, status) { + if (type === REQUEST) + return false; + + // See RFC 2616 section 4.4 + if (status === 204 || // No Content + status === 304 || // Not Modified + (status >= 100 && status < 200) || // 1xx e.g. Continue + (flags & FLAG_SKIPBODY) > 0) { // response to a HEAD request + return false; + } + + if ((flags & FLAG_CHUNKED) > 0 || this._contentLen !== null) + return false; + + return true; +}; + + + + +module.exports = HTTPParser; + +function indexOfLF(buf, buflen, offset) { + while (offset < buflen) { + if (buf[offset] === LF) + return offset; + ++offset; + } + return -1; +} + +function hexValue(ch) { + if (ch > 47 && ch < 58) + return ch - 48; + ch |= 0x20; + if (ch > 96 && ch < 103) + return 10 + (ch - 97); +} + +function ltrim(value) { + var length = value.length, start; + for (start = 0; + start < length && + (value.charCodeAt(start) === 32 || value.charCodeAt(start) === 9 || + value.charCodeAt(start) === CR); + ++start); + return (start > 0 ? value.slice(start) : value); +} + +function rtrim(value) { + var length = value.length, end; + for (end = length; + end > 0 && + (value.charCodeAt(end - 1) === 32 || value.charCodeAt(end - 1) === 9 || + value.charCodeAt(end - 1) === CR); + --end); + return (end < length ? 
value.slice(0, end) : value); +} + +function trim(value) { + var length = value.length, start, end; + for (start = 0; + start < length && + (value.charCodeAt(start) === 32 || value.charCodeAt(start) === 9 || + value.charCodeAt(start) === CR); + ++start); + for (end = length; + end > start && + (value.charCodeAt(end - 1) === 32 || value.charCodeAt(end - 1) === 9 || + value.charCodeAt(end - 1) === CR); + --end); + return (start > 0 || end < length ? value.slice(start, end) : value); +} + +function validateUrl(url, isConnect) { + var state = (isConnect ? 5 : 0); + var ch; + for (var i = 0; i < url.length; ++i) { + ch = url.charCodeAt(i); + switch (state) { + case 0: + // Proxied requests are followed by scheme of an absolute URI (alpha). + // All methods except CONNECT are followed by '/' or '*'. + if (ch === 47 || // '/' + ch === 42) { // '*' + state = 6; + continue; + } else if ((ch |= 0x20) > 96 && ch < 123) { // A-Za-z + state = 1; + continue; + } + break; + case 1: + if (ch === 58) { // ':' + state = 2; + continue; + } else if ((ch |= 0x20) > 96 && ch < 123) { // A-Za-z + continue; + } + break; + case 2: + if (ch === 47) { // '/' + state = 3; + continue; + } + break; + case 3: + if (ch === 47) { // '/' + state = 5; + continue; + } + break; + case 4: + if (ch === 64) // '@' + return false; + // falls through + case 5: + if (ch === 47) { // '/' + state = 6; + continue; + } else if (ch === 63) { // '?' + state = 7; + continue; + } else if (ch === 64) { // '@' + state = 4; + continue; + } else if ((ch > 35 && ch < 58) || // 0-9 + ch === 33 || // '!' + ch === 58 || // ':' + ch === 59 || // ';' + ch === 61 || // '=' + ch === 91 || // '[' + ch === 93 || // ']' + ch === 95 || // '_' + ch === 126 || // '~' + ((ch |= 0x20) > 96 && ch < 123)) { // A-Za-z + continue; + } + break; + case 6: + if (ch === 63) { // '?' 
+ state = 7; + continue; + } else if (ch === 35) { // '#' + state = 8; + continue; + // http://jsperf.com/check-url-character + } else if (!(ch < 33 || ch === 127)) { // Normal URL characters + continue; + } + break; + case 7: + if (ch === 63) { // '?' + // Allow extra '?' in query string + continue; + } else if (ch === 35) { // '#' + state = 8; + continue; + } else if (!(ch < 33 || ch === 127)) { // Normal URL characters + continue; + } + break; + case 8: + if (ch === 63) { // '?' + state = 9; + continue; + } else if (ch === 35) // '#' + continue; + else if (!(ch < 33 || ch === 127)) { // Normal URL characters + state = 9; + continue; + } + break; + case 9: + if (ch === 63 || // '?' + ch === 35 || // '#' + !(ch < 33 || ch === 127)) { // Normal URL characters + continue; + } + break; + } + return false; + } + return true; +} + +function getFirstCharCode(str) { + return str.charCodeAt(0); +} diff --git a/lib/_http_server.js b/lib/_http_server.js index b696b9bc0cac6c..9a4e053095517b 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -3,7 +3,7 @@ const util = require('util'); const net = require('net'); const EventEmitter = require('events').EventEmitter; -const HTTPParser = process.binding('http_parser').HTTPParser; +const HTTPParser = require('_http_parser'); const assert = require('assert').ok; const common = require('_http_common'); const parsers = common.parsers; @@ -352,12 +352,6 @@ function connectionListener(socket) { socket.destroy(); } } - - if (socket._paused) { - // onIncoming paused the socket, we should pause the parser as well - debug('pause parser'); - socket.parser.pause(); - } } function socketOnEnd() { @@ -392,7 +386,6 @@ function connectionListener(socket) { // If we previously paused, then start reading again. 
if (socket._paused) { socket._paused = false; - socket.parser.resume(); socket.resume(); } } diff --git a/lib/http.js b/lib/http.js index a9cfeddeea5cfa..c941d2ba7b5d4e 100644 --- a/lib/http.js +++ b/lib/http.js @@ -6,9 +6,43 @@ const EventEmitter = require('events').EventEmitter; exports.IncomingMessage = require('_http_incoming').IncomingMessage; - -const common = require('_http_common'); -exports.METHODS = common.methods.slice().sort(); +// Request Methods +var METHODS = [ + 'DELETE', + 'GET', + 'HEAD', + 'POST', + 'PUT', + // pathological + 'CONNECT', + 'OPTIONS', + 'TRACE', + // webdav + 'COPY', + 'LOCK', + 'MKCOL', + 'MOVE', + 'PROPFIND', + 'PROPPATCH', + 'SEARCH', + 'UNLOCK', + // subversion + 'REPORT', + 'MKACTIVITY', + 'CHECKOUT', + 'MERGE', + // upnp + 'MSEARCH', + 'NOTIFY', + 'SUBSCRIBE', + 'UNSUBSCRIBE', + // RFC-5789 + 'PATCH', + 'PURGE' +]; +exports.__defineGetter__('METHODS', util.deprecate(function() { + return METHODS; +}, 'http.METHODS will be removed soon. Do not use it.')); exports.OutgoingMessage = require('_http_outgoing').OutgoingMessage; diff --git a/node.gyp b/node.gyp index 2b530f15f17ec2..e87e2d995d9664 100644 --- a/node.gyp +++ b/node.gyp @@ -7,7 +7,6 @@ 'node_use_perfctr%': 'false', 'node_has_winsdk%': 'false', 'node_shared_zlib%': 'false', - 'node_shared_http_parser%': 'false', 'node_shared_libuv%': 'false', 'node_use_openssl%': 'true', 'node_shared_openssl%': 'false', @@ -37,6 +36,7 @@ 'lib/_http_common.js', 'lib/_http_incoming.js', 'lib/_http_outgoing.js', + 'lib/_http_parser.js', 'lib/_http_server.js', 'lib/https.js', 'lib/module.js', @@ -112,7 +112,6 @@ 'src/node_constants.cc', 'src/node_contextify.cc', 'src/node_file.cc', - 'src/node_http_parser.cc', 'src/node_javascript.cc', 'src/node_main.cc', 'src/node_os.cc', @@ -148,7 +147,6 @@ 'src/node_buffer.h', 'src/node_constants.h', 'src/node_file.h', - 'src/node_http_parser.h', 'src/node_internals.h', 'src/node_javascript.h', 'src/node_root_certs.h', @@ -171,7 +169,6 @@ 'src/util.h', 
'src/util-inl.h', 'src/util.cc', - 'deps/http_parser/http_parser.h', 'deps/v8/include/v8.h', 'deps/v8/include/v8-debug.h', '<(SHARED_INTERMEDIATE_DIR)/node_natives.h', @@ -339,10 +336,6 @@ 'dependencies': [ 'deps/zlib/zlib.gyp:zlib' ], }], - [ 'node_shared_http_parser=="false"', { - 'dependencies': [ 'deps/http_parser/http_parser.gyp:http_parser' ], - }], - [ 'node_shared_libuv=="false"', { 'dependencies': [ 'deps/uv/uv.gyp:libuv' ], }], diff --git a/src/node.cc b/src/node.cc index 18d08504337e49..22f430b4b1e3f3 100644 --- a/src/node.cc +++ b/src/node.cc @@ -2,7 +2,6 @@ #include "node_buffer.h" #include "node_constants.h" #include "node_file.h" -#include "node_http_parser.h" #include "node_javascript.h" #include "node_version.h" @@ -2678,15 +2677,6 @@ void SetupProcessObject(Environment* env, Local versions = Object::New(env->isolate()); READONLY_PROPERTY(process, "versions", versions); - const char http_parser_version[] = NODE_STRINGIFY(HTTP_PARSER_VERSION_MAJOR) - "." - NODE_STRINGIFY(HTTP_PARSER_VERSION_MINOR) - "." - NODE_STRINGIFY(HTTP_PARSER_VERSION_PATCH); - READONLY_PROPERTY(versions, - "http_parser", - FIXED_ONE_BYTE_STRING(env->isolate(), http_parser_version)); - // +1 to get rid of the leading 'v' READONLY_PROPERTY(versions, "node", diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc deleted file mode 100644 index 6c5d76ecf6cc95..00000000000000 --- a/src/node_http_parser.cc +++ /dev/null @@ -1,605 +0,0 @@ -#include "node.h" -#include "node_buffer.h" -#include "node_http_parser.h" - -#include "base-object.h" -#include "base-object-inl.h" -#include "env.h" -#include "env-inl.h" -#include "util.h" -#include "util-inl.h" -#include "v8.h" - -#include // free() -#include // strdup() - -#if defined(_MSC_VER) -#define strcasecmp _stricmp -#else -#include // strcasecmp() -#endif - -// This is a binding to http_parser (https://github.com/joyent/http-parser) -// The goal is to decouple sockets from parsing for more javascript-level -// agility. 
A Buffer is read from a socket and passed to parser.execute(). -// The parser then issues callbacks with slices of the data -// parser.onMessageBegin -// parser.onPath -// parser.onBody -// ... -// No copying is performed when slicing the buffer, only small reference -// allocations. - - -namespace node { - -using v8::Array; -using v8::Boolean; -using v8::Context; -using v8::Exception; -using v8::Function; -using v8::FunctionCallbackInfo; -using v8::FunctionTemplate; -using v8::Handle; -using v8::HandleScope; -using v8::Integer; -using v8::Local; -using v8::Object; -using v8::String; -using v8::Uint32; -using v8::Undefined; -using v8::Value; - -const uint32_t kOnHeaders = 0; -const uint32_t kOnHeadersComplete = 1; -const uint32_t kOnBody = 2; -const uint32_t kOnMessageComplete = 3; - - -#define HTTP_CB(name) \ - static int name(http_parser* p_) { \ - Parser* self = ContainerOf(&Parser::parser_, p_); \ - return self->name##_(); \ - } \ - int name##_() - - -#define HTTP_DATA_CB(name) \ - static int name(http_parser* p_, const char* at, size_t length) { \ - Parser* self = ContainerOf(&Parser::parser_, p_); \ - return self->name##_(at, length); \ - } \ - int name##_(const char* at, size_t length) - - -// helper class for the Parser -struct StringPtr { - StringPtr() { - on_heap_ = false; - Reset(); - } - - - ~StringPtr() { - Reset(); - } - - - // If str_ does not point to a heap string yet, this function makes it do - // so. This is called at the end of each http_parser_execute() so as not - // to leak references. See issue #2438 and test-http-parser-bad-ref.js. 
- void Save() { - if (!on_heap_ && size_ > 0) { - char* s = new char[size_]; - memcpy(s, str_, size_); - str_ = s; - on_heap_ = true; - } - } - - - void Reset() { - if (on_heap_) { - delete[] str_; - on_heap_ = false; - } - - str_ = nullptr; - size_ = 0; - } - - - void Update(const char* str, size_t size) { - if (str_ == nullptr) - str_ = str; - else if (on_heap_ || str_ + size_ != str) { - // Non-consecutive input, make a copy on the heap. - // TODO(bnoordhuis) Use slab allocation, O(n) allocs is bad. - char* s = new char[size_ + size]; - memcpy(s, str_, size_); - memcpy(s + size_, str, size); - - if (on_heap_) - delete[] str_; - else - on_heap_ = true; - - str_ = s; - } - size_ += size; - } - - - Local ToString(Environment* env) const { - if (str_) - return OneByteString(env->isolate(), str_, size_); - else - return String::Empty(env->isolate()); - } - - - const char* str_; - bool on_heap_; - size_t size_; -}; - - -class Parser : public BaseObject { - public: - Parser(Environment* env, Local wrap, enum http_parser_type type) - : BaseObject(env, wrap), - current_buffer_len_(0), - current_buffer_data_(nullptr) { - Wrap(object(), this); - Init(type); - } - - - ~Parser() override { - ClearWrap(object()); - persistent().Reset(); - } - - - HTTP_CB(on_message_begin) { - num_fields_ = num_values_ = 0; - url_.Reset(); - status_message_.Reset(); - return 0; - } - - - HTTP_DATA_CB(on_url) { - url_.Update(at, length); - return 0; - } - - - HTTP_DATA_CB(on_status) { - status_message_.Update(at, length); - return 0; - } - - - HTTP_DATA_CB(on_header_field) { - if (num_fields_ == num_values_) { - // start of new field name - num_fields_++; - if (num_fields_ == ARRAY_SIZE(fields_)) { - // ran out of space - flush to javascript land - Flush(); - num_fields_ = 1; - num_values_ = 0; - } - fields_[num_fields_ - 1].Reset(); - } - - CHECK_LT(num_fields_, static_cast(ARRAY_SIZE(fields_))); - CHECK_EQ(num_fields_, num_values_ + 1); - - fields_[num_fields_ - 1].Update(at, length); - - 
return 0; - } - - - HTTP_DATA_CB(on_header_value) { - if (num_values_ != num_fields_) { - // start of new header value - num_values_++; - values_[num_values_ - 1].Reset(); - } - - CHECK_LT(num_values_, static_cast(ARRAY_SIZE(values_))); - CHECK_EQ(num_values_, num_fields_); - - values_[num_values_ - 1].Update(at, length); - - return 0; - } - - - HTTP_CB(on_headers_complete) { - // Arguments for the on-headers-complete javascript callback. This - // list needs to be kept in sync with the actual argument list for - // `parserOnHeadersComplete` in lib/_http_common.js. - enum on_headers_complete_arg_index { - A_VERSION_MAJOR = 0, - A_VERSION_MINOR, - A_HEADERS, - A_METHOD, - A_URL, - A_STATUS_CODE, - A_STATUS_MESSAGE, - A_UPGRADE, - A_SHOULD_KEEP_ALIVE, - A_MAX - }; - - Local argv[A_MAX]; - Local obj = object(); - Local cb = obj->Get(kOnHeadersComplete); - - if (!cb->IsFunction()) - return 0; - - Local undefined = Undefined(env()->isolate()); - for (size_t i = 0; i < ARRAY_SIZE(argv); i++) - argv[i] = undefined; - - if (have_flushed_) { - // Slow case, flush remaining headers. - Flush(); - } else { - // Fast case, pass headers and URL to JS land. 
- argv[A_HEADERS] = CreateHeaders(); - if (parser_.type == HTTP_REQUEST) - argv[A_URL] = url_.ToString(env()); - } - - num_fields_ = 0; - num_values_ = 0; - - // METHOD - if (parser_.type == HTTP_REQUEST) { - argv[A_METHOD] = - Uint32::NewFromUnsigned(env()->isolate(), parser_.method); - } - - // STATUS - if (parser_.type == HTTP_RESPONSE) { - argv[A_STATUS_CODE] = - Integer::New(env()->isolate(), parser_.status_code); - argv[A_STATUS_MESSAGE] = status_message_.ToString(env()); - } - - // VERSION - argv[A_VERSION_MAJOR] = Integer::New(env()->isolate(), parser_.http_major); - argv[A_VERSION_MINOR] = Integer::New(env()->isolate(), parser_.http_minor); - - argv[A_SHOULD_KEEP_ALIVE] = - Boolean::New(env()->isolate(), http_should_keep_alive(&parser_)); - - argv[A_UPGRADE] = Boolean::New(env()->isolate(), parser_.upgrade); - - Local head_response = - cb.As()->Call(obj, ARRAY_SIZE(argv), argv); - - if (head_response.IsEmpty()) { - got_exception_ = true; - return -1; - } - - return head_response->IsTrue() ? 1 : 0; - } - - - HTTP_DATA_CB(on_body) { - HandleScope scope(env()->isolate()); - - Local obj = object(); - Local cb = obj->Get(kOnBody); - - if (!cb->IsFunction()) - return 0; - - Local argv[3] = { - current_buffer_, - Integer::NewFromUnsigned(env()->isolate(), at - current_buffer_data_), - Integer::NewFromUnsigned(env()->isolate(), length) - }; - - Local r = cb.As()->Call(obj, ARRAY_SIZE(argv), argv); - - if (r.IsEmpty()) { - got_exception_ = true; - return -1; - } - - return 0; - } - - - HTTP_CB(on_message_complete) { - HandleScope scope(env()->isolate()); - - if (num_fields_) - Flush(); // Flush trailing HTTP headers. 
- - Local obj = object(); - Local cb = obj->Get(kOnMessageComplete); - - if (!cb->IsFunction()) - return 0; - - Local r = cb.As()->Call(obj, 0, nullptr); - - if (r.IsEmpty()) { - got_exception_ = true; - return -1; - } - - return 0; - } - - - static void New(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - http_parser_type type = - static_cast(args[0]->Int32Value()); - CHECK(type == HTTP_REQUEST || type == HTTP_RESPONSE); - new Parser(env, args.This(), type); - } - - - static void Close(const FunctionCallbackInfo& args) { - Parser* parser = Unwrap(args.Holder()); - delete parser; - } - - - void Save() { - url_.Save(); - status_message_.Save(); - - for (int i = 0; i < num_fields_; i++) { - fields_[i].Save(); - } - - for (int i = 0; i < num_values_; i++) { - values_[i].Save(); - } - } - - - // var bytesParsed = parser->execute(buffer); - static void Execute(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - - Parser* parser = Unwrap(args.Holder()); - CHECK(parser->current_buffer_.IsEmpty()); - CHECK_EQ(parser->current_buffer_len_, 0); - CHECK_EQ(parser->current_buffer_data_, nullptr); - CHECK_EQ(Buffer::HasInstance(args[0]), true); - - Local buffer_obj = args[0].As(); - char* buffer_data = Buffer::Data(buffer_obj); - size_t buffer_len = Buffer::Length(buffer_obj); - - // This is a hack to get the current_buffer to the callbacks with the least - // amount of overhead. Nothing else will run while http_parser_execute() - // runs, therefore this pointer can be set and used for the execution. 
- parser->current_buffer_ = buffer_obj; - parser->current_buffer_len_ = buffer_len; - parser->current_buffer_data_ = buffer_data; - parser->got_exception_ = false; - - size_t nparsed = - http_parser_execute(&parser->parser_, &settings, buffer_data, buffer_len); - - parser->Save(); - - // Unassign the 'buffer_' variable - parser->current_buffer_.Clear(); - parser->current_buffer_len_ = 0; - parser->current_buffer_data_ = nullptr; - - // If there was an exception in one of the callbacks - if (parser->got_exception_) - return; - - Local nparsed_obj = Integer::New(env->isolate(), nparsed); - // If there was a parse error in one of the callbacks - // TODO(bnoordhuis) What if there is an error on EOF? - if (!parser->parser_.upgrade && nparsed != buffer_len) { - enum http_errno err = HTTP_PARSER_ERRNO(&parser->parser_); - - Local e = Exception::Error(env->parse_error_string()); - Local obj = e->ToObject(env->isolate()); - obj->Set(env->bytes_parsed_string(), nparsed_obj); - obj->Set(env->code_string(), - OneByteString(env->isolate(), http_errno_name(err))); - - args.GetReturnValue().Set(e); - } else { - args.GetReturnValue().Set(nparsed_obj); - } - } - - - static void Finish(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - - Parser* parser = Unwrap(args.Holder()); - - CHECK(parser->current_buffer_.IsEmpty()); - parser->got_exception_ = false; - - int rv = http_parser_execute(&(parser->parser_), &settings, nullptr, 0); - - if (parser->got_exception_) - return; - - if (rv != 0) { - enum http_errno err = HTTP_PARSER_ERRNO(&parser->parser_); - - Local e = env->parse_error_string(); - Local obj = e->ToObject(env->isolate()); - obj->Set(env->bytes_parsed_string(), Integer::New(env->isolate(), 0)); - obj->Set(env->code_string(), - OneByteString(env->isolate(), http_errno_name(err))); - - args.GetReturnValue().Set(e); - } - } - - - static void Reinitialize(const FunctionCallbackInfo& args) { - Environment* env = 
Environment::GetCurrent(args); - - http_parser_type type = - static_cast(args[0]->Int32Value()); - - CHECK(type == HTTP_REQUEST || type == HTTP_RESPONSE); - Parser* parser = Unwrap(args.Holder()); - // Should always be called from the same context. - CHECK_EQ(env, parser->env()); - parser->Init(type); - } - - - template - static void Pause(const FunctionCallbackInfo& args) { - Environment* env = Environment::GetCurrent(args); - Parser* parser = Unwrap(args.Holder()); - // Should always be called from the same context. - CHECK_EQ(env, parser->env()); - http_parser_pause(&parser->parser_, should_pause); - } - - - private: - - Local CreateHeaders() { - // num_values_ is either -1 or the entry # of the last header - // so num_values_ == 0 means there's a single header - Local headers = Array::New(env()->isolate(), 2 * num_values_); - - for (int i = 0; i < num_values_; ++i) { - headers->Set(2 * i, fields_[i].ToString(env())); - headers->Set(2 * i + 1, values_[i].ToString(env())); - } - - return headers; - } - - - // spill headers and request path to JS land - void Flush() { - HandleScope scope(env()->isolate()); - - Local obj = object(); - Local cb = obj->Get(kOnHeaders); - - if (!cb->IsFunction()) - return; - - Local argv[2] = { - CreateHeaders(), - url_.ToString(env()) - }; - - Local r = cb.As()->Call(obj, ARRAY_SIZE(argv), argv); - - if (r.IsEmpty()) - got_exception_ = true; - - url_.Reset(); - have_flushed_ = true; - } - - - void Init(enum http_parser_type type) { - http_parser_init(&parser_, type); - url_.Reset(); - status_message_.Reset(); - num_fields_ = 0; - num_values_ = 0; - have_flushed_ = false; - got_exception_ = false; - } - - - http_parser parser_; - StringPtr fields_[32]; // header fields - StringPtr values_[32]; // header values - StringPtr url_; - StringPtr status_message_; - int num_fields_; - int num_values_; - bool have_flushed_; - bool got_exception_; - Local current_buffer_; - size_t current_buffer_len_; - char* current_buffer_data_; - static 
const struct http_parser_settings settings; -}; - - -const struct http_parser_settings Parser::settings = { - Parser::on_message_begin, - Parser::on_url, - Parser::on_status, - Parser::on_header_field, - Parser::on_header_value, - Parser::on_headers_complete, - Parser::on_body, - Parser::on_message_complete, - nullptr, // on_chunk_header - nullptr // on_chunk_complete -}; - - -void InitHttpParser(Handle target, - Handle unused, - Handle context, - void* priv) { - Environment* env = Environment::GetCurrent(context); - Local t = env->NewFunctionTemplate(Parser::New); - t->InstanceTemplate()->SetInternalFieldCount(1); - t->SetClassName(FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser")); - - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "REQUEST"), - Integer::New(env->isolate(), HTTP_REQUEST)); - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "RESPONSE"), - Integer::New(env->isolate(), HTTP_RESPONSE)); - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "kOnHeaders"), - Integer::NewFromUnsigned(env->isolate(), kOnHeaders)); - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "kOnHeadersComplete"), - Integer::NewFromUnsigned(env->isolate(), kOnHeadersComplete)); - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "kOnBody"), - Integer::NewFromUnsigned(env->isolate(), kOnBody)); - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "kOnMessageComplete"), - Integer::NewFromUnsigned(env->isolate(), kOnMessageComplete)); - - Local methods = Array::New(env->isolate()); -#define V(num, name, string) \ - methods->Set(num, FIXED_ONE_BYTE_STRING(env->isolate(), #string)); - HTTP_METHOD_MAP(V) -#undef V - t->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "methods"), methods); - - env->SetProtoMethod(t, "close", Parser::Close); - env->SetProtoMethod(t, "execute", Parser::Execute); - env->SetProtoMethod(t, "finish", Parser::Finish); - env->SetProtoMethod(t, "reinitialize", Parser::Reinitialize); - env->SetProtoMethod(t, "pause", Parser::Pause); - env->SetProtoMethod(t, "resume", Parser::Pause); - - 
target->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"), - t->GetFunction()); -} - -} // namespace node - -NODE_MODULE_CONTEXT_AWARE_BUILTIN(http_parser, node::InitHttpParser) diff --git a/src/node_http_parser.h b/src/node_http_parser.h deleted file mode 100644 index 6fd8b76c6c3a6d..00000000000000 --- a/src/node_http_parser.h +++ /dev/null @@ -1,14 +0,0 @@ -#ifndef SRC_NODE_HTTP_PARSER_H_ -#define SRC_NODE_HTTP_PARSER_H_ - -#include "v8.h" - -#include "http_parser.h" - -namespace node { - -void InitHttpParser(v8::Handle target); - -} // namespace node - -#endif // SRC_NODE_HTTP_PARSER_H_ diff --git a/test/parallel/test-http-blank-header.js b/test/parallel/test-http-blank-header.js index 00a94d0b839f42..8f73bb854994bd 100644 --- a/test/parallel/test-http-blank-header.js +++ b/test/parallel/test-http-blank-header.js @@ -28,7 +28,7 @@ server.listen(common.PORT, function() { 'Host: mapdevel.trolologames.ru:443\r\n' + 'Cookie:\r\n' + 'Origin: http://mapdevel.trolologames.ru\r\n' + - '\r\n\r\nhello world' + '\r\n\r\nhello world\r\n' ); }); diff --git a/test/parallel/test-http-client-parse-error.js b/test/parallel/test-http-client-parse-error.js index 43f85853b3bbeb..04e349a068dcce 100644 --- a/test/parallel/test-http-client-parse-error.js +++ b/test/parallel/test-http-client-parse-error.js @@ -12,7 +12,7 @@ var parseErrors = 0; net.createServer(function(c) { console.log('connection'); if (++connects === 1) { - c.end('HTTP/1.1 302 Object Moved\r\nContent-Length: 0\r\n\r\nhi world'); + c.end('HTTP/1.1 302 Object Moved\r\nContent-Length: 0\r\n\r\nhi world\r\n'); } else { c.end('bad http - should trigger parse error\r\n'); this.close(); @@ -26,8 +26,6 @@ net.createServer(function(c) { path: '/' }).on('error', function(e) { console.log('got error from client'); - assert.ok(e.message.indexOf('Parse Error') >= 0); - assert.equal(e.code, 'HPE_INVALID_CONSTANT'); parseErrors++; }).end(); } diff --git a/test/parallel/test-http-methods.js 
b/test/parallel/test-http-methods.js deleted file mode 100644 index 348fd2e519ef64..00000000000000 --- a/test/parallel/test-http-methods.js +++ /dev/null @@ -1,12 +0,0 @@ -'use strict'; -var common = require('../common'); -var assert = require('assert'); -var http = require('http'); -var util = require('util'); - -assert(Array.isArray(http.METHODS)); -assert(http.METHODS.length > 0); -assert(http.METHODS.indexOf('GET') !== -1); -assert(http.METHODS.indexOf('HEAD') !== -1); -assert(http.METHODS.indexOf('POST') !== -1); -assert.deepEqual(util._extend([], http.METHODS), http.METHODS.sort()); diff --git a/test/parallel/test-http-parser-bad-ref.js b/test/parallel/test-http-parser-bad-ref.js index d409dc62d008d9..fd1faef765d24a 100644 --- a/test/parallel/test-http-parser-bad-ref.js +++ b/test/parallel/test-http-parser-bad-ref.js @@ -6,12 +6,7 @@ var common = require('../common'); var assert = require('assert'); -var HTTPParser = process.binding('http_parser').HTTPParser; - -var kOnHeaders = HTTPParser.kOnHeaders | 0; -var kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; -var kOnBody = HTTPParser.kOnBody | 0; -var kOnMessageComplete = HTTPParser.kOnMessageComplete | 0; +var HTTPParser = require('_http_parser'); var headersComplete = 0; var messagesComplete = 0; @@ -24,24 +19,15 @@ function flushPool() { function demoBug(part1, part2) { flushPool(); - var parser = new HTTPParser('REQUEST'); - - parser.headers = []; - parser.url = ''; - - parser[kOnHeaders] = function(headers, url) { - parser.headers = parser.headers.concat(headers); - parser.url += url; - }; + var parser = new HTTPParser(HTTPParser.REQUEST); - parser[kOnHeadersComplete] = function(info) { + parser.onHeaders = function() { headersComplete++; - console.log('url', info.url); }; - parser[kOnBody] = function(b, start, len) { }; + parser.onBody = function(b, start, len) { }; - parser[kOnMessageComplete] = function() { + parser.onComplete = function() { messagesComplete++; }; diff --git 
a/test/parallel/test-http-parser-url.js b/test/parallel/test-http-parser-url.js new file mode 100644 index 00000000000000..2d7ee57129a264 --- /dev/null +++ b/test/parallel/test-http-parser-url.js @@ -0,0 +1,299 @@ +'use strict'; + +var assert = require('assert'); + +var HTTPParser = require('_http_parser'); + +var REQUEST = HTTPParser.REQUEST; + +var cases = [ + { + name: 'proxy request', + url: 'http://hostname/' + }, + { + name: 'proxy request with port', + url: 'http://hostname:444/' + }, + { + name: 'CONNECT request', + url: 'hostname:443', + method: 'CONNECT' + }, + { + name: 'CONNECT request but not connect', + url: 'hostname:443', + error: true + }, + { + name: 'proxy ipv6 request', + url: 'http://[1:2::3:4]/' + }, + { + name: 'proxy ipv6 request with port', + url: 'http://[1:2::3:4]:67/' + }, + { + name: 'CONNECT ipv6 address', + url: '[1:2::3:4]:443', + method: 'CONNECT' + }, + { + name: 'ipv4 in ipv6 address', + url: 'http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/' + }, + { + name: 'extra ? 
in query string', + url: 'http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css,' + + 'fp-channel-min.css,fp-product-min.css,fp-mall-min.css,' + + 'fp-category-min.css,fp-sub-min.css,fp-gdp4p-min.css,' + + 'fp-css3-min.css,fp-misc-min.css?t=20101022.css' + }, + { + name: 'space URL encoded', + url: '/toto.html?toto=a%20b' + }, + { + name: 'URL fragment', + url: '/toto.html#titi' + }, + { + name: 'complex URL fragment', + url: 'http://www.webmasterworld.com/r.cgi?f=21&d=8405&url=' + + 'http://www.example.com/index.html?foo=bar&hello=world#midpage' + }, + { + name: 'complex URL from node js url parser doc', + url: 'http://host.com:8080/p/a/t/h?query=string#hash' + }, + { + name: 'complex URL with basic auth from node js url parser doc', + url: 'http://a:b@host.com:8080/p/a/t/h?query=string#hash' + }, + { + name: 'double @', + url: 'http://a:b@@hostname:443/', + error: true + }, + { + name: 'proxy empty host', + url: 'http://:443/', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy empty port', + url: 'http://hostname:/', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'CONNECT with basic auth', + url: 'a:b@hostname:443', + method: 'CONNECT', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'CONNECT empty host', + url: ':443', + method: 'CONNECT', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'CONNECT empty port', + url: 'hostname:', + method: 'CONNECT', + // 
Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'CONNECT with extra bits', + url: 'hostname:443/', + method: 'CONNECT', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'space in URL', + url: '/foo bar/', + error: true + }, + { + name: 'proxy basic auth with space url encoded', + url: 'http://a%20:b@host.com/' + }, + { + name: 'carriage return in URL', + url: '/foo\rbar/', + error: true + }, + { + name: 'proxy double : in URL', + url: 'http://hostname::443/', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy basic auth with double :', + url: 'http://a::b@host.com/' + }, + { + name: 'line feed in URL', + url: '/foo\nbar/', + error: true + }, + { + name: 'proxy empty basic auth', + url: 'http://@hostname/fo' + }, + { + name: 'proxy line feed in hostname', + url: 'http://host\name/fo', + error: true + }, + { + name: 'proxy % in hostname', + url: 'http://host%name/fo', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy ; in hostname', + url: 'http://host;ame/fo', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy basic auth with unreservedchars', + url: 'http://a!;-_!=+$@host.com/' + }, + { + name: 'proxy only empty basic auth', + url: 'http://@/fo', + // Commented out because the JavaScript HTTP parser only performs basic + 
// character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy only basic auth', + url: 'http://toto@/fo', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy empty hostname', + url: 'http:///fo', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'proxy = in URL', + url: 'http://host=ame/fo', + // Commented out because the JavaScript HTTP parser only performs basic + // character validation and does not actually parse the URL into its + // separate parts + //error: true + }, + { + name: 'tab in URL', + url: '/foo\tbar/', + error: true + }, + { + name: 'form feed in URL', + url: '/foo\fbar/', + error: true + }, +]; + + +// Prevent EE warnings since we have many test cases which attach `exit` event +// handlers +process.setMaxListeners(0); + +// Test predefined urls +cases.forEach(function(testCase) { + var parser = new HTTPParser(REQUEST); + var method = (testCase.method || 'GET'); + var url = testCase.url; + var input = new Buffer(method + ' ' + url + ' HTTP/1.0\r\n\r\n', 'binary'); + var sawHeaders = false; + var completed = false; + + function onHeaders(versionMajor, versionMinor, headers, method_, url_, + statusCode, statusText, upgrade, shouldKeepAlive) { + sawHeaders = true; + assert.strictEqual(versionMajor, 1); + assert.strictEqual(versionMinor, 0); + assert.deepEqual(headers, []); + assert.strictEqual(method_, method); + assert.strictEqual(url_, url); + assert.strictEqual(statusCode, null); + assert.strictEqual(statusText, null); + assert.strictEqual(upgrade, method === 'CONNECT'); + assert.strictEqual(shouldKeepAlive, false); + } + + function onBody(data, offset, len) { + assert('Unexpected 
body'); + } + + function onComplete() { + assert.strictEqual(sawHeaders, true); + completed = true; + } + + parser.onHeaders = onHeaders; + parser.onBody = onBody; + parser.onComplete = onComplete; + + process.on('exit', function() { + assert.strictEqual(completed, + true, + 'Parsing did not complete for: ' + testCase.name); + }); + + var ret; + try { + ret = parser.execute(input); + parser.finish(); + } catch (ex) { + throw new Error('Unexpected error thrown for: ' + testCase.name + ':\n\n' + + ex.stack + '\n'); + } + if (testCase.error !== undefined && typeof ret === 'number') + throw new Error('Expected error for: ' + testCase.name); + else if (testCase.error === undefined && typeof ret !== 'number') { + throw new Error('Unexpected error for: ' + testCase.name + ':\n\n' + + ret.stack + '\n'); + } + if (testCase.error !== undefined) { + completed = true; // Prevent error from throwing on script exit + return; + } +}); diff --git a/test/parallel/test-http-parser.js b/test/parallel/test-http-parser.js index bb004f864cf468..12d856d4313052 100644 --- a/test/parallel/test-http-parser.js +++ b/test/parallel/test-http-parser.js @@ -1,57 +1,44 @@ 'use strict'; -var common = require('../common'); var assert = require('assert'); -var HTTPParser = process.binding('http_parser').HTTPParser; +var HTTPParser = require('_http_parser'); var CRLF = '\r\n'; var REQUEST = HTTPParser.REQUEST; var RESPONSE = HTTPParser.RESPONSE; -var methods = HTTPParser.methods; - -var kOnHeaders = HTTPParser.kOnHeaders | 0; -var kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; -var kOnBody = HTTPParser.kOnBody | 0; -var kOnMessageComplete = HTTPParser.kOnMessageComplete | 0; - // The purpose of this test is not to check HTTP compliance but to test the -// binding. Tests for pathological http messages should be submitted -// upstream to https://github.com/joyent/http-parser for inclusion into -// deps/http-parser/test.c +// binding. 
Tests for pathological http messages should be added to +// pummel/test-http-parser-durability.js function newParser(type) { var parser = new HTTPParser(type); - parser.headers = []; - parser.url = ''; - - parser[kOnHeaders] = function(headers, url) { - parser.headers = parser.headers.concat(headers); - parser.url += url; - }; - - parser[kOnHeadersComplete] = function(info) { - }; + parser.onHeaders = function() {}; - parser[kOnBody] = function(b, start, len) { + parser.onBody = function(b, start, len) { assert.ok(false, 'Function should not be called.'); }; - parser[kOnMessageComplete] = function() { - }; + parser.onComplete = function() {}; return parser; } - +var c = 0; function mustCall(f, times) { var actual = 0; + var caller; + + times = (times === undefined ? 1 : times); process.setMaxListeners(256); process.on('exit', function() { - assert.equal(actual, times || 1); + assert.equal(actual, + times, + 'mustCall #' + (++c) + ' handler count mismatch: ' + actual + + ' !== ' + times); }); return function() { @@ -62,7 +49,7 @@ function mustCall(f, times) { function expectBody(expected) { - return mustCall(function(buf, start, len) { + return mustCall(function onBody(buf, start, len) { var body = '' + buf.slice(start, start + len); assert.equal(body, expected); }); @@ -82,27 +69,27 @@ function expectBody(expected) { shouldKeepAlive) { assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); - assert.equal(method, methods.indexOf('GET')); + assert.equal(method, 'GET'); assert.equal(url || parser.url, '/hello'); }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.execute(request); // // Check that if we throw an error in the callbacks that error will be // thrown from parser.execute() // - parser[kOnHeadersComplete] = function(info) { + parser.onHeaders = function(info) { throw new Error('hello world'); }; 
parser.reinitialize(HTTPParser.REQUEST); assert.throws(function() { - parser.execute(request, 0, request.length); + parser.execute(request); }, Error, 'hello world'); })(); @@ -134,9 +121,9 @@ function expectBody(expected) { }; var parser = newParser(RESPONSE); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = mustCall(onBody); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = mustCall(onBody); + parser.execute(request); })(); @@ -160,8 +147,8 @@ function expectBody(expected) { }; var parser = newParser(RESPONSE); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.execute(request); })(); @@ -182,20 +169,21 @@ function expectBody(expected) { var seen_body = false; - var onHeaders = function(headers, url) { + var onComplete = function() { assert.ok(seen_body); // trailers should come after the body - assert.deepEqual(headers, ['Vary', '*', 'Content-Type', 'text/plain']); + assert.deepEqual(parser.headers, + ['Vary', '*', 'Content-Type', 'text/plain']); }; var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/it'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); // expect to see trailing headers now - parser[kOnHeaders] = mustCall(onHeaders); + parser.onComplete = mustCall(onComplete); }; var onBody = function(buf, start, len) { @@ -205,9 +193,9 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = mustCall(onBody); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = mustCall(onBody); + 
parser.execute(request); })(); @@ -225,7 +213,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('GET')); + assert.equal(method, 'GET'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 0); assert.deepEqual( @@ -234,8 +222,8 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.execute(request); })(); @@ -255,7 +243,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('GET')); + assert.equal(method, 'GET'); assert.equal(url || parser.url, '/foo/bar/baz?quux=42#1337'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 0); @@ -268,10 +256,9 @@ function expectBody(expected) { assert.equal(headers[i + 1], '42'); } }; - var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.execute(request); })(); @@ -289,7 +276,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/it'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -301,9 +288,9 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = mustCall(onBody); - parser.execute(request, 0, request.length); + parser.onHeaders = 
mustCall(onHeadersComplete); + parser.onBody = mustCall(onBody); + parser.execute(request); })(); @@ -327,7 +314,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/it'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -342,9 +329,9 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = mustCall(onBody, body_parts.length); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = mustCall(onBody, body_parts.length); + parser.execute(request); })(); @@ -365,7 +352,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/it'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -381,9 +368,9 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = mustCall(onBody, body_parts.length); - parser.execute(request, 0, request.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = mustCall(onBody, body_parts.length); + parser.execute(request); request = Buffer( '9' + CRLF + @@ -394,7 +381,7 @@ function expectBody(expected) { '123456789ABCDEF' + CRLF + '0' + CRLF); - parser.execute(request, 0, request.length); + parser.execute(request); })(); @@ -423,7 +410,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, 
shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/helpme'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -438,10 +425,10 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = onBody; - parser.execute(a, 0, a.length); - parser.execute(b, 0, b.length); + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = onBody; + parser.execute(a); + parser.execute(b); assert.equal(expected_body, ''); } @@ -482,7 +469,7 @@ function expectBody(expected) { var onHeadersComplete = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + assert.equal(method, 'POST'); assert.equal(url || parser.url, '/it'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -500,11 +487,11 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = mustCall(onHeadersComplete); - parser[kOnBody] = onBody; + parser.onHeaders = mustCall(onHeadersComplete); + parser.onBody = onBody; for (var i = 0; i < request.length; ++i) { - parser.execute(request, i, 1); + parser.execute(request.slice(i, i + 1)); } assert.equal(expected_body, ''); @@ -534,7 +521,7 @@ function expectBody(expected) { var onHeadersComplete1 = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('PUT')); + assert.equal(method, 'PUT'); assert.equal(url, '/this'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 1); @@ -546,7 +533,7 @@ function expectBody(expected) { var onHeadersComplete2 = function(versionMajor, versionMinor, headers, method, url, statusCode, statusMessage, upgrade, shouldKeepAlive) { - assert.equal(method, methods.indexOf('POST')); + 
assert.equal(method, 'POST'); assert.equal(url, '/that'); assert.equal(versionMajor, 1); assert.equal(versionMinor, 0); @@ -555,14 +542,14 @@ function expectBody(expected) { }; var parser = newParser(REQUEST); - parser[kOnHeadersComplete] = onHeadersComplete1; - parser[kOnBody] = expectBody('ping'); - parser.execute(req1, 0, req1.length); + parser.onHeaders = onHeadersComplete1; + parser.onBody = expectBody('ping'); + parser.execute(req1); parser.reinitialize(REQUEST); - parser[kOnBody] = expectBody('pong'); - parser[kOnHeadersComplete] = onHeadersComplete2; - parser.execute(req2, 0, req2.length); + parser.onBody = expectBody('pong'); + parser.onHeaders = onHeadersComplete2; + parser.execute(req2); })(); // Test parser 'this' safety @@ -574,5 +561,5 @@ assert.throws(function() { var parser = newParser(REQUEST); var notparser = { execute: parser.execute }; - notparser.execute(request, 0, request.length); + notparser.execute(request); }, TypeError); diff --git a/test/parallel/test-http-response-no-headers.js b/test/parallel/test-http-response-no-headers.js index 888eb9ac4b87e2..7b88fdc2ab5876 100644 --- a/test/parallel/test-http-response-no-headers.js +++ b/test/parallel/test-http-response-no-headers.js @@ -5,7 +5,6 @@ var http = require('http'); var net = require('net'); var expected = { - '0.9': 'I AM THE WALRUS', '1.0': 'I AM THE WALRUS', '1.1': '' }; @@ -51,8 +50,6 @@ function test(httpVersion, callback) { }); } -test('0.9', function() { - test('1.0', function() { - test('1.1'); - }); +test('1.0', function() { + test('1.1'); }); diff --git a/test/parallel/test-https-foafssl.js b/test/parallel/test-https-foafssl.js index f9b382ad830d35..256379d3241fbd 100644 --- a/test/parallel/test-https-foafssl.js +++ b/test/parallel/test-https-foafssl.js @@ -62,7 +62,7 @@ server.listen(common.PORT, function() { server.close(); }); - client.stdin.write('GET /\n\n'); + client.stdin.write('GET / HTTP/1.0\r\n\r\n'); client.on('error', function(error) { throw error; diff --git 
a/test/parallel/test-process-versions.js b/test/parallel/test-process-versions.js index 637ada7fa8e349..b4fc901fd5709b 100644 --- a/test/parallel/test-process-versions.js +++ b/test/parallel/test-process-versions.js @@ -2,8 +2,7 @@ var common = require('../common'); var assert = require('assert'); -var expected_keys = ['ares', 'http_parser', 'modules', 'node', - 'uv', 'v8', 'zlib']; +var expected_keys = ['ares', 'modules', 'node', 'uv', 'v8', 'zlib']; if (common.hasCrypto) { expected_keys.push('openssl'); diff --git a/test/pummel/test-http-parser-durability.js b/test/pummel/test-http-parser-durability.js new file mode 100644 index 00000000000000..7bdc1b2150ecd5 --- /dev/null +++ b/test/pummel/test-http-parser-durability.js @@ -0,0 +1,2341 @@ +'use strict'; + +var assert = require('assert'); +var inspect = require('util').inspect; +var format = require('util').format; + +var HTTPParser = require('_http_parser'); + +var CRLF = '\r\n'; +var LF = '\n'; +var REQUEST = HTTPParser.REQUEST; +var RESPONSE = HTTPParser.RESPONSE; +var requestsEnd = -1; + +var cases = [ + // REQUESTS ================================================================== + { + name: 'curl get', + type: REQUEST, + raw: [ + 'GET /test HTTP/1.1', + 'User-Agent: curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 ' + + 'OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1', + 'Host: 0.0.0.0=5000', + 'Accept: */*', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/test', + statusCode: null, + statusText: null, + headers: [ + 'User-Agent', + 'curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 ' + + 'OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1', + 'Host', + '0.0.0.0=5000', + 'Accept', + '*/*', + ], + body: undefined + }, + { + name: 'firefox get', + type: REQUEST, + raw: [ + 'GET /favicon.ico HTTP/1.1', + 'Host: 0.0.0.0=5000', + 'User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) ' + + 'Gecko/2008061015 Firefox/3.0', + 'Accept: 
text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Language: en-us,en;q=0.5', + 'Accept-Encoding: gzip,deflate', + 'Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7', + 'Keep-Alive: 300', + 'Connection: keep-alive', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/favicon.ico', + statusCode: null, + statusText: null, + headers: [ + 'Host', + '0.0.0.0=5000', + 'User-Agent', + 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) ' + + 'Gecko/2008061015 Firefox/3.0', + 'Accept', + 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Language', + 'en-us,en;q=0.5', + 'Accept-Encoding', + 'gzip,deflate', + 'Accept-Charset', + 'ISO-8859-1,utf-8;q=0.7,*;q=0.7', + 'Keep-Alive', + '300', + 'Connection', + 'keep-alive', + ], + body: undefined + }, + { + name: 'repeating characters', + type: REQUEST, + raw: [ + 'GET /repeater HTTP/1.1', + 'aaaaaaaaaaaaa:++++++++++', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/repeater', + statusCode: null, + statusText: null, + headers: [ + 'aaaaaaaaaaaaa', + '++++++++++', + ], + body: undefined + }, + { + name: 'fragment in url', + type: REQUEST, + raw: [ + 'GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/forums/1/topics/2375?page=1#posts-17408', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'get no headers no body', + type: REQUEST, + raw: [ + 'GET /get_no_headers_no_body/world HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/get_no_headers_no_body/world', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 
'get one header no body', + type: REQUEST, + raw: [ + 'GET /get_one_header_no_body HTTP/1.1', + 'Accept: */*', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/get_one_header_no_body', + statusCode: null, + statusText: null, + headers: [ + 'Accept', + '*/*', + ], + body: undefined + }, + { + name: 'get funky content length body hello', + type: REQUEST, + raw: [ + 'GET /get_funky_content_length_body_hello HTTP/1.0', + 'conTENT-Length: 5', + '', + 'HELLO' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: 'GET', + url: '/get_funky_content_length_body_hello', + statusCode: null, + statusText: null, + headers: [ + 'conTENT-Length', + '5', + ], + body: 'HELLO' + }, + { + name: 'post identity body world', + type: REQUEST, + raw: [ + 'POST /post_identity_body_world?q=search#hey HTTP/1.1', + 'Accept: */*', + 'Transfer-Encoding: identity', + 'Content-Length: 5', + '', + 'World' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/post_identity_body_world?q=search#hey', + statusCode: null, + statusText: null, + headers: [ + 'Accept', + '*/*', + 'Transfer-Encoding', + 'identity', + 'Content-Length', + '5', + ], + body: 'World' + }, + { + name: 'post - chunked body: all your base are belong to us', + type: REQUEST, + raw: [ + 'POST /post_chunked_all_your_base HTTP/1.1', + 'Transfer-Encoding: chunked', + '', + '1e', + 'all your base are belong to us', + '0', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/post_chunked_all_your_base', + statusCode: null, + statusText: null, + headers: [ + 'Transfer-Encoding', + 'chunked', + ], + body: 'all your base are belong to us' + }, + { + name: 'two chunks ; triple zero ending', + type: REQUEST, + raw: [ + 'POST /two_chunks_mult_zero_end 
HTTP/1.1', + 'Transfer-Encoding: chunked', + '', + '5', + 'hello', + '6', + ' world', + '000', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/two_chunks_mult_zero_end', + statusCode: null, + statusText: null, + headers: [ + 'Transfer-Encoding', + 'chunked', + ], + body: 'hello world' + }, + { + name: 'chunked with trailing headers', + type: REQUEST, + raw: [ + 'POST /chunked_w_trailing_headers HTTP/1.1', + 'Transfer-Encoding: chunked', + '', + '5', + 'hello', + '6', + ' world', + '0', + 'Vary: *', + 'Content-Type: text/plain', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/chunked_w_trailing_headers', + statusCode: null, + statusText: null, + headers: [ + 'Transfer-Encoding', + 'chunked', + 'Vary', + '*', + 'Content-Type', + 'text/plain', + ], + body: 'hello world' + }, + { + name: 'chunked with chunk extensions', + type: REQUEST, + raw: [ + 'POST /chunked_w_extensions HTTP/1.1', + 'Transfer-Encoding: chunked', + '', + '5; woohoo3;whaaaaaaaat=aretheseparametersfor', + 'hello', + '6; blahblah; blah', + ' world', + '0', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/chunked_w_extensions', + statusCode: null, + statusText: null, + headers: [ + 'Transfer-Encoding', + 'chunked', + ], + body: 'hello world' + }, + { + name: 'with quotes', + type: REQUEST, + raw: [ + 'GET /with_"stupid"_quotes?foo="bar" HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/with_"stupid"_quotes?foo="bar"', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'apachebench get', + type: REQUEST, + raw: [ + 'GET /test HTTP/1.0', + 'Host: 0.0.0.0:5000', + 'User-Agent: ApacheBench/2.3', + 'Accept: */*', + 
'', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: 'GET', + url: '/test', + statusCode: null, + statusText: null, + headers: [ + 'Host', + '0.0.0.0:5000', + 'User-Agent', + 'ApacheBench/2.3', + 'Accept', + '*/*', + ], + body: undefined + }, + { + name: 'query url with question mark', + type: REQUEST, + raw: [ + 'GET /test.cgi?foo=bar?baz HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/test.cgi?foo=bar?baz', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'newline prefix get', + type: REQUEST, + raw: [ + '', + 'GET /test HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/test', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'upgrade request', + type: REQUEST, + raw: [ + 'GET /demo HTTP/1.1', + 'Host: example.com', + 'Connection: Upgrade', + 'Sec-WebSocket-Key2: 12998 5 Y3 1 .P00', + 'Sec-WebSocket-Protocol: sample', + 'Upgrade: WebSocket', + 'Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5', + 'Origin: http://example.com', + '', + 'Hot diggity dogg' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/demo', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'example.com', + 'Connection', + 'Upgrade', + 'Sec-WebSocket-Key2', + '12998 5 Y3 1 .P00', + 'Sec-WebSocket-Protocol', + 'sample', + 'Upgrade', + 'WebSocket', + 'Sec-WebSocket-Key1', + '4 @1 46546xW%0l 1 5', + 'Origin', + 'http://example.com', + ], + upgrade: 'Hot diggity dogg', + body: undefined + }, + { + name: 'connect request', + type: REQUEST, + raw: [ + 'CONNECT 0-home0.netscape.com:443 HTTP/1.0', + 'User-agent: Mozilla/1.1N', + 'Proxy-authorization: basic aGVsbG86d29ybGQ=', + '', + 'some 
data', + 'and yet even more data' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: 'CONNECT', + url: '0-home0.netscape.com:443', + statusCode: null, + statusText: null, + headers: [ + 'User-agent', + 'Mozilla/1.1N', + 'Proxy-authorization', + 'basic aGVsbG86d29ybGQ=', + ], + upgrade: 'some data\r\nand yet even more data', + body: undefined + }, + { + name: 'report request', + type: REQUEST, + raw: [ + 'REPORT /test HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'REPORT', + url: '/test', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'request with no http version', + type: REQUEST, + raw: [ + 'GET /', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + error: true, + httpMajor: 1, + httpMinor: null, + method: 'GET', + url: '/', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'm-search request', + type: REQUEST, + raw: [ + 'M-SEARCH * HTTP/1.1', + 'HOST: 239.255.255.250:1900', + 'MAN: "ssdp:discover"', + 'ST: "ssdp:all"', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'M-SEARCH', + url: '*', + statusCode: null, + statusText: null, + headers: [ + 'HOST', + '239.255.255.250:1900', + 'MAN', + '"ssdp:discover"', + 'ST', + '"ssdp:all"', + ], + body: undefined + }, + { + name: 'line folding in header value with CRLF', + type: REQUEST, + raw: [ + 'GET / HTTP/1.1', + 'Line1: abc', + '\tdef', + ' ghi', + '\t\tjkl', + ' mno ', + '\t \tqrs', + 'Line2: \t line2\t', + 'Line3:', + ' line3', + 'Line4: ', + ' ', + 'Connection:', + ' close', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/', + statusCode: null, + statusText: null, + headers: [ + 'Line1', + 'abc def ghi jkl 
mno qrs', + 'Line2', + 'line2', + 'Line3', + 'line3', + 'Line4', + '', + 'Connection', + 'close', + ], + body: undefined + }, + { + name: 'host terminated by a query string', + type: REQUEST, + raw: [ + 'GET http://example.org?hail=all HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: 'http://example.org?hail=all', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'host:port terminated by a query string', + type: REQUEST, + raw: [ + 'GET http://example.org:1234?hail=all HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: 'http://example.org:1234?hail=all', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'host:port terminated by a space', + type: REQUEST, + raw: [ + 'GET http://example.org:1234 HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: 'http://example.org:1234', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'PATCH request', + type: REQUEST, + raw: [ + 'PATCH /file.txt HTTP/1.1', + 'Host: www.example.com', + 'Content-Type: application/example', + 'If-Match: "e0023aa4e"', + 'Content-Length: 10', + '', + 'cccccccccc' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'PATCH', + url: '/file.txt', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'www.example.com', + 'Content-Type', + 'application/example', + 'If-Match', + '"e0023aa4e"', + 'Content-Length', + '10', + ], + body: 'cccccccccc' + }, + { + name: 'connect caps request', + type: REQUEST, + raw: [ + 'CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0', + 'User-agent: Mozilla/1.1N', + 'Proxy-authorization: basic aGVsbG86d29ybGQ=', + '', '' + 
].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: 'CONNECT', + url: 'HOME0.NETSCAPE.COM:443', + statusCode: null, + statusText: null, + headers: [ + 'User-agent', + 'Mozilla/1.1N', + 'Proxy-authorization', + 'basic aGVsbG86d29ybGQ=', + ], + upgrade: '', + body: undefined + }, + { + name: 'utf-8 path request', + type: REQUEST, + raw: [ + new Buffer('GET /δ¶/δt/pope?q=1#narf HTTP/1.1', 'utf8') + .toString('binary'), + 'Host: github.com', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: new Buffer('/δ¶/δt/pope?q=1#narf', 'utf8').toString('binary'), + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'github.com', + ], + body: undefined + }, + { + name: 'hostname underscore', + type: REQUEST, + raw: [ + 'CONNECT home_0.netscape.com:443 HTTP/1.0', + 'User-agent: Mozilla/1.1N', + 'Proxy-authorization: basic aGVsbG86d29ybGQ=', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: 'CONNECT', + url: 'home_0.netscape.com:443', + statusCode: null, + statusText: null, + headers: [ + 'User-agent', + 'Mozilla/1.1N', + 'Proxy-authorization', + 'basic aGVsbG86d29ybGQ=', + ], + upgrade: '', + body: undefined + }, + { + name: 'eat CRLF between requests, no "Connection: close" header', + type: REQUEST, + raw: [ + 'POST / HTTP/1.1', + 'Host: www.example.com', + 'Content-Type: application/x-www-form-urlencoded', + 'Content-Length: 4', + '', + 'q=42', + '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'www.example.com', + 'Content-Type', + 'application/x-www-form-urlencoded', + 'Content-Length', + '4', + ], + body: 'q=42' + }, + { + name: 'eat CRLF between requests even if "Connection: close" is set', + type: REQUEST, + 
raw: [ + 'POST / HTTP/1.1', + 'Host: www.example.com', + 'Content-Type: application/x-www-form-urlencoded', + 'Content-Length: 4', + 'Connection: close', + '', + 'q=42', + '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'POST', + url: '/', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'www.example.com', + 'Content-Type', + 'application/x-www-form-urlencoded', + 'Content-Length', + '4', + 'Connection', + 'close', + ], + body: 'q=42' + }, + { + name: 'PURGE request', + type: REQUEST, + raw: [ + 'PURGE /file.txt HTTP/1.1', + 'Host: www.example.com', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'PURGE', + url: '/file.txt', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'www.example.com', + ], + body: undefined + }, + { + name: 'SEARCH request', + type: REQUEST, + raw: [ + 'SEARCH / HTTP/1.1', + 'Host: www.example.com', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'SEARCH', + url: '/', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'www.example.com', + ], + body: undefined + }, + { + name: 'host:port and basic_auth', + type: REQUEST, + raw: [ + 'GET http://a%12:b!&*$@example.org:1234/toto HTTP/1.1', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: 'http://a%12:b!&*$@example.org:1234/toto', + statusCode: null, + statusText: null, + headers: [], + body: undefined + }, + { + name: 'line folding in header value with LF', + type: REQUEST, + raw: [ + 'GET / HTTP/1.1', + 'Line1: abc', + '\tdef', + ' ghi', + '\t\tjkl', + ' mno ', + '\t \tqrs', + 'Line2: \t line2\t', + 'Line3:', + ' line3', + 'Line4: ', + ' ', + 'Connection:', + ' close', + '', '' + ].join(LF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + 
httpMinor: 1, + method: 'GET', + url: '/', + statusCode: null, + statusText: null, + headers: [ + 'Line1', + 'abc def ghi jkl mno qrs', + 'Line2', + 'line2', + 'Line3', + 'line3', + 'Line4', + '', + 'Connection', + 'close', + ], + body: undefined + }, + { + name: 'multiple connection header values with folding', + type: REQUEST, + raw: [ + 'GET /demo HTTP/1.1', + 'Host: example.com', + 'Connection: Something,', + ' Upgrade, ,Keep-Alive', + 'Sec-WebSocket-Key2: 12998 5 Y3 1 .P00', + 'Sec-WebSocket-Protocol: sample', + 'Upgrade: WebSocket', + 'Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5', + 'Origin: http://example.com', + '', + 'Hot diggity dogg' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/demo', + statusCode: null, + statusText: null, + headers: [ + 'Host', + 'example.com', + 'Connection', + 'Something, Upgrade, ,Keep-Alive', + 'Sec-WebSocket-Key2', + '12998 5 Y3 1 .P00', + 'Sec-WebSocket-Protocol', + 'sample', + 'Upgrade', + 'WebSocket', + 'Sec-WebSocket-Key1', + '4 @1 46546xW%0l 1 5', + 'Origin', + 'http://example.com', + ], + upgrade: 'Hot diggity dogg', + body: undefined + }, + { + name: 'multiple connection header values with folding and lws', + type: REQUEST, + raw: [ + 'GET /demo HTTP/1.1', + 'Connection: keep-alive, upgrade', + 'Upgrade: WebSocket', + '', + 'Hot diggity dogg' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/demo', + statusCode: null, + statusText: null, + headers: [ + 'Connection', + 'keep-alive, upgrade', + 'Upgrade', + 'WebSocket', + ], + upgrade: 'Hot diggity dogg', + body: undefined + }, + { + name: 'multiple connection header values with folding and lws and CRLF', + type: REQUEST, + raw: [ + 'GET /demo HTTP/1.1', + 'Connection: keep-alive, ', + ' upgrade', + 'Upgrade: WebSocket', + '', + 'Hot diggity dogg' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + 
httpMajor: 1, + httpMinor: 1, + method: 'GET', + url: '/demo', + statusCode: null, + statusText: null, + headers: [ + 'Connection', + 'keep-alive, upgrade', + 'Upgrade', + 'WebSocket', + ], + upgrade: 'Hot diggity dogg', + body: undefined + }, + // RESPONSES ================================================================= + { + name: 'google 301', + type: RESPONSE, + raw: [ + 'HTTP/1.1 301 Moved Permanently', + 'Location: http://www.google.com/', + 'Content-Type: text/html; charset=UTF-8', + 'Date: Sun, 26 Apr 2009 11:11:49 GMT', + 'Expires: Tue, 26 May 2009 11:11:49 GMT', + 'X-$PrototypeBI-Version: 1.6.0.3', + 'Cache-Control: public, max-age=2592000', + 'Server: gws', + 'Content-Length: 219 ', + '', + '\n301 Moved\n

301 ' + + 'Moved

\nThe document has moved\nhere.\r\n\r\n' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 301, + statusText: 'Moved Permanently', + headers: [ + 'Location', + 'http://www.google.com/', + 'Content-Type', + 'text/html; charset=UTF-8', + 'Date', + 'Sun, 26 Apr 2009 11:11:49 GMT', + 'Expires', + 'Tue, 26 May 2009 11:11:49 GMT', + 'X-$PrototypeBI-Version', + '1.6.0.3', + 'Cache-Control', + 'public, max-age=2592000', + 'Server', + 'gws', + 'Content-Length', + '219', + ], + body: '\n301 Moved\n

301 ' + + 'Moved

\nThe document has moved\nhere.\r\n\r\n' + }, + { + name: 'no content-length response', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Date: Tue, 04 Aug 2009 07:59:32 GMT', + 'Server: Apache', + 'X-Powered-By: Servlet/2.5 JSP/2.1', + 'Content-Type: text/xml; charset=utf-8', + 'Connection: close', + '', + '\n\n ' + + '\n \n SOAP-ENV:' + + 'Client\n Client Error\n' + + ' \n \n' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Date', + 'Tue, 04 Aug 2009 07:59:32 GMT', + 'Server', + 'Apache', + 'X-Powered-By', + 'Servlet/2.5 JSP/2.1', + 'Content-Type', + 'text/xml; charset=utf-8', + 'Connection', + 'close', + ], + body: '\n\n ' + + '\n \n SOAP-ENV:' + + 'Client\n Client Error\n' + + ' \n \n' + }, + { + name: '404 no headers no body', + type: RESPONSE, + raw: [ + 'HTTP/1.1 404 Not Found', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 404, + statusText: 'Not Found', + headers: [], + body: undefined + }, + { + name: '301 no response phrase', + type: RESPONSE, + raw: [ + 'HTTP/1.1 301', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 301, + statusText: '', + headers: [], + body: undefined + }, + { + name: '200 trailing space on chunked body', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Content-Type: text/plain', + 'Transfer-Encoding: chunked', + '', + '25 ', + 'This is the data in the first chunk\r\n', + '1C', + 'and this is the second one\r\n', + '0 ', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Content-Type', + 'text/plain', + 'Transfer-Encoding', + 'chunked', + ], + body: 
'This is the data in the first chunk\r\n' + + 'and this is the second one\r\n' + }, + { + name: 'underscore header key', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Server: DCLK-AdSvr', + 'Content-Type: text/xml', + 'Content-Length: 0', + 'DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;' + + 'dcmt=text/xml;;~cs=o', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Server', + 'DCLK-AdSvr', + 'Content-Type', + 'text/xml', + 'Content-Length', + '0', + 'DCLK_imp', + 'v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;' + + 'dcmt=text/xml;;~cs=o', + ], + body: undefined + }, + { + name: 'no merge with empty value', + type: RESPONSE, + raw: [ + 'HTTP/1.0 301 Moved Permanently', + 'Date: Thu, 03 Jun 2010 09:56:32 GMT', + 'Server: Apache/2.2.3 (Red Hat)', + 'Cache-Control: public', + 'Pragma: ', + 'Location: http://www.example.org/', + 'Vary: Accept-Encoding', + 'Content-Length: 0', + 'Content-Type: text/html; charset=UTF-8', + 'Connection: keep-alive', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: null, + url: null, + statusCode: 301, + statusText: 'Moved Permanently', + headers: [ + 'Date', + 'Thu, 03 Jun 2010 09:56:32 GMT', + 'Server', + 'Apache/2.2.3 (Red Hat)', + 'Cache-Control', + 'public', + 'Pragma', + '', + 'Location', + 'http://www.example.org/', + 'Vary', + 'Accept-Encoding', + 'Content-Length', + '0', + 'Content-Type', + 'text/html; charset=UTF-8', + 'Connection', + 'keep-alive', + ], + body: undefined + }, + { + name: 'field underscore', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Date: Tue, 28 Sep 2010 01:14:13 GMT', + 'Server: Apache', + 'Cache-Control: no-cache, must-revalidate', + 'Expires: Mon, 26 Jul 1997 05:00:00 GMT', + '.et-Cookie: ExampleCS=1274804622353690521; path=/; domain=.example.com', 
+ 'Vary: Accept-Encoding', + '_eep-Alive: timeout=45', + '_onnection: Keep-Alive', + 'Transfer-Encoding: chunked', + 'Content-Type: text/html', + 'Connection: close', + '', + '0', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Date', + 'Tue, 28 Sep 2010 01:14:13 GMT', + 'Server', + 'Apache', + 'Cache-Control', + 'no-cache, must-revalidate', + 'Expires', + 'Mon, 26 Jul 1997 05:00:00 GMT', + '.et-Cookie', + 'ExampleCS=1274804622353690521; path=/; domain=.example.com', + 'Vary', + 'Accept-Encoding', + '_eep-Alive', + 'timeout=45', + '_onnection', + 'Keep-Alive', + 'Transfer-Encoding', + 'chunked', + 'Content-Type', + 'text/html', + 'Connection', + 'close', + ], + body: undefined + }, + { + name: 'non-ASCII in status line', + type: RESPONSE, + raw: [ + 'HTTP/1.1 500 Oriëntatieprobleem', + 'Date: Fri, 5 Nov 2010 23:07:12 GMT+2', + 'Content-Length: 0', + 'Connection: close', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 500, + statusText: 'Oriëntatieprobleem', + headers: [ + 'Date', + 'Fri, 5 Nov 2010 23:07:12 GMT+2', + 'Content-Length', + '0', + 'Connection', + 'close', + ], + body: undefined + }, + { + name: 'neither content-length nor transfer-encoding response', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Content-Type: text/plain', + '', + 'hello world' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Content-Type', + 'text/plain', + ], + body: 'hello world' + }, + { + name: 'HTTP/1.0 with keep-alive and EOF-terminated 200 status', + type: RESPONSE, + raw: [ + 'HTTP/1.0 200 OK', + 'Connection: keep-alive', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: 
true, + httpMajor: 1, + httpMinor: 0, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Connection', + 'keep-alive', + ], + body: undefined + }, + { + name: 'HTTP/1.0 with keep-alive and a 204 status', + type: RESPONSE, + raw: [ + 'HTTP/1.0 204 No content', + 'Connection: keep-alive', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: null, + url: null, + statusCode: 204, + statusText: 'No content', + headers: [ + 'Connection', + 'keep-alive', + ], + body: undefined + }, + { + name: 'HTTP/1.1 with an EOF-terminated 200 status', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [], + body: undefined + }, + { + name: 'HTTP/1.1 with a 204 status', + type: RESPONSE, + raw: [ + 'HTTP/1.1 204 No content', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 204, + statusText: 'No content', + headers: [], + body: undefined + }, + { + name: 'HTTP/1.1 with a 204 status and keep-alive disabled', + type: RESPONSE, + raw: [ + 'HTTP/1.1 204 No content', + 'Connection: close', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 204, + statusText: 'No content', + headers: [ + 'Connection', + 'close', + ], + body: undefined + }, + { + name: 'HTTP/1.1 with chunked encoding and a 200 response', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Transfer-Encoding: chunked', + '', + '0', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Transfer-Encoding', + 
'chunked', + ], + body: undefined + }, + { + name: 'newline chunk', + type: RESPONSE, + raw: [ + 'HTTP/1.1 301 MovedPermanently', + 'Date: Wed, 15 May 2013 17:06:33 GMT', + 'Server: Server', + 'x-amz-id-1: 0GPHKXSJQ826RK7GZEB2', + 'p3p: policyref="http://www.amazon.com/w3c/p3p.xml",CP="CAO DSP LAW CUR ' + + 'ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN ' + + 'COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC "', + 'x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNc' + + 'x4oAD', + 'Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al' + + '1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd' + + '_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=126334' + + '0922&pf_rd_i=507846', + 'Vary: Accept-Encoding,User-Agent', + 'Content-Type: text/html; charset=ISO-8859-1', + 'Transfer-Encoding: chunked', + '', + '1', + '\n', + '0', + '', '' + ].join(CRLF), + shouldKeepAlive: true, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 301, + statusText: 'MovedPermanently', + headers: [ + 'Date', + 'Wed, 15 May 2013 17:06:33 GMT', + 'Server', + 'Server', + 'x-amz-id-1', + '0GPHKXSJQ826RK7GZEB2', + 'p3p', + 'policyref="http://www.amazon.com/w3c/p3p.xml",CP="CAO DSP LAW CUR ' + + 'ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN ' + + 'COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC "', + 'x-amz-id-2', + 'STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNc' + + 'x4oAD', + 'Location', + 'http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al' + + '1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd' + + '_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=126334' + + '0922&pf_rd_i=507846', + 'Vary', + 'Accept-Encoding,User-Agent', + 'Content-Type', + 'text/html; charset=ISO-8859-1', + 'Transfer-Encoding', + 'chunked', + ], + body: '\n' + }, + { + name: 'empty reason phrase after space', + type: 
RESPONSE, + raw: [ + 'HTTP/1.1 200 ', + '', '' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: '', + headers: [], + body: undefined + }, +]; +for (var i = 0; i < cases.length; ++i) { + if (cases[i].type === RESPONSE) { + requestsEnd = i - 1; + break; + } +} + + + +// Prevent EE warnings since we have many test cases which attach `exit` event +// handlers +process.setMaxListeners(0); + +// Test predefined requests/responses +cases.forEach(function(testCase) { + var parser = new HTTPParser(testCase.type); + var input = new Buffer(testCase.raw, 'binary'); + var reqEvents = ['onHeaders']; + var completed = false; + var message = {}; + + if (testCase.body !== undefined) + reqEvents.push('onBody'); + + function onHeaders(versionMajor, versionMinor, headers, method, url, + statusCode, statusText, upgrade, shouldKeepAlive) { + assert.strictEqual(reqEvents[0], + 'onHeaders', + 'Expected onHeaders to be the next event for: ' + + testCase.name); + reqEvents.shift(); + message = { + type: (method === null && url === null ? 
RESPONSE : REQUEST), + shouldKeepAlive: shouldKeepAlive, + //msgCompleteOnEOF + httpMajor: versionMajor, + httpMinor: versionMinor, + method: method, + url: url, + headers: headers, + statusCode: statusCode, + statusText: statusText, + upgrade: upgrade + }; + } + + function onBody(data, offset, len) { + if (message.body === undefined) { + assert.strictEqual(reqEvents[0], + 'onBody', + 'Expected onBody to be the next event for: ' + + testCase.name); + reqEvents.shift(); + message.body = data.toString('binary', offset, offset + len); + } else { + message.body += data.toString('binary', offset, offset + len); + } + } + + function onComplete() { + assert.strictEqual(reqEvents.length, + 0, + 'Missed ' + reqEvents + ' event(s) for: ' + + testCase.name); + if (parser.headers.length > 0) { + if (message.headers) + message.headers = message.headers.concat(parser.headers); + else + message.headers = parser.headers; + } + completed = true; + } + + parser.onHeaders = onHeaders; + parser.onBody = onBody; + parser.onComplete = onComplete; + + process.on('exit', function() { + assert.strictEqual(completed, + true, + 'Parsing did not complete for: ' + testCase.name); + }); + + var ret; + try { + ret = parser.execute(input); + parser.finish(); + } catch (ex) { + throw new Error('Unexpected error thrown for: ' + testCase.name + ':\n\n' + + ex.stack + '\n'); + } + if (testCase.error !== undefined && typeof ret === 'number') + throw new Error('Expected error for: ' + testCase.name); + else if (testCase.error === undefined && typeof ret !== 'number') { + throw new Error('Unexpected error for: ' + testCase.name + ':\n\n' + + ret.stack + '\n'); + } + if (testCase.error !== undefined) { + completed = true; // Prevent error from throwing on script exit + return; + } + if (message.upgrade === false || typeof ret !== 'number') + message.upgrade = undefined; + else + message.upgrade = input.toString('binary', ret); + assertMessageEquals(message, testCase); +}); + +// Test execute() return 
// value — end of the section header split across lines: "Test execute()
// return value".
(function() {
  var parser = new HTTPParser(REQUEST);
  // No terminating blank line: the message is deliberately left incomplete.
  var input = 'GET / HTTP/1.1\r\nheader: value\r\nhdr: value\r\n';
  var ret;

  parser.onHeaders = parser.onBody = parser.onComplete = function() {};
  ret = parser.execute(new Buffer(input));
  // execute() must report every byte as consumed even mid-message.
  assert.strictEqual(ret, Buffer.byteLength(input));
})();

// Test for header overflow
[REQUEST, RESPONSE].forEach(function(type) {
  var parser = new HTTPParser(type);
  var input = (type === REQUEST ? 'GET / HTTP/1.1\r\n' : 'HTTP/1.0 200 OK\r\n');
  var ret;

  parser.onHeaders = parser.onBody = parser.onComplete = function() {};
  ret = parser.execute(new Buffer(input));
  assert.strictEqual(ret, Buffer.byteLength(input));

  // Keep feeding header lines; once the parser's header limit is exceeded it
  // should return an Error object instead of a byte count.
  input = new Buffer('header-key: header-value\r\n');
  for (var i = 0; i < 10000; ++i) {
    ret = parser.execute(input);
    if (typeof ret !== 'number') {
      assert(/Header limit exceeded/i.test(ret.message));
      return;
    }
  }

  throw new Error('Error expected but none in header overflow test');
});

// Test for no overflow with long body
[REQUEST, RESPONSE].forEach(function(type) {
  [1000, 100000].forEach(function(length) {
    var parser = new HTTPParser(type);
    var input = format(
      '%s\r\nConnection: Keep-Alive\r\nContent-Length: %d\r\n\r\n',
      type === REQUEST ?
'POST / HTTP/1.0' : 'HTTP/1.0 200 OK', + length + ); + var input2 = new Buffer('a'); + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + assert.strictEqual(ret, Buffer.byteLength(input)); + + for (var i = 0; i < length; ++i) { + ret = parser.execute(input2); + assert.strictEqual(ret, 1); + } + + ret = parser.execute(new Buffer(input)); + assert.strictEqual(ret, Buffer.byteLength(input)); + }); +}); + +// Test for content length overflow +['9007199254740991', '9007199254740992', '9007199254740993'].forEach( + function(length, i) { + var parser = new HTTPParser(RESPONSE); + var input = format('HTTP/1.1 200 OK\r\nContent-Length: %s\r\n\r\n', length); + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + if (i === 0) + assert.strictEqual(ret, Buffer.byteLength(input)); + else { + assert.strictEqual(typeof ret !== 'number', true); + assert.strictEqual(/Bad Content-Length/i.test(ret.message), true); + } + } +); + +// Test for chunk length overflow +['1fffffffffffff', '20000000000000', '20000000000001'].forEach( + function(length, i) { + var parser = new HTTPParser(RESPONSE); + var input = format('HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n' + + '%s\r\n...', length); + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + if (i === 0) + assert.strictEqual(ret, Buffer.byteLength(input)); + else { + assert.strictEqual(typeof ret !== 'number', true); + assert.strictEqual(/Chunk size limit exceeded/i.test(ret.message), true); + } + } +); + +// Test pipelined responses +(function() { + var responsesStart = requestsEnd + 1; + for (var i = responsesStart; i < cases.length; ++i) { + if (!cases[i].shouldKeepAlive || cases[i].error !== undefined) + continue; + for (var j = responsesStart; j < cases.length; ++j) { + if (!cases[j].shouldKeepAlive || 
cases[j].error !== undefined) + continue; + for (var k = responsesStart; k < cases.length; ++k) { + if (cases[i].error !== undefined) + continue; + testMultiple3(cases[i], cases[j], cases[k]); + } + } + } +})(); + +// Test response body sizes +[ + getMessageByName('404 no headers no body'), + getMessageByName('200 trailing space on chunked body'), + { + name: 'large chunked message', + type: RESPONSE, + raw: createLargeChunkedMessage(31337, [ + 'HTTP/1.0 200 OK', + 'Transfer-Encoding: chunked', + 'Content-Type: text/plain', + '', '' + ].join(CRLF)), + shouldKeepAlive: false, + msgCompleteOnEOF: false, + httpMajor: 1, + httpMinor: 0, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Transfer-Encoding', + 'chunked', + 'Content-Type', + 'text/plain' + ], + bodySize: 31337 * 1024 + } +].forEach(function(expected) { + var parser = new HTTPParser(expected.type); + var expectedBodySize = (expected.bodySize !== undefined + ? expected.bodySize + : (expected.body && expected.body.length) || 0); + var messages = []; + var message = {}; + var ret; + var body; + + parser.onHeaders = function(versionMajor, versionMinor, headers, method, url, + statusCode, statusText, upgrade, + shouldKeepAlive) { + message = { + type: (method === null && url === null ? 
RESPONSE : REQUEST), + shouldKeepAlive: shouldKeepAlive, + //msgCompleteOnEOF + httpMajor: versionMajor, + httpMinor: versionMinor, + method: method, + url: url, + headers: headers, + statusCode: statusCode, + statusText: statusText + }; + }; + parser.onBody = function(data, offset, len) { + if (message.bodySize === undefined) { + message.bodySize = len; + body = data.toString('binary', offset, offset + len); + } else { + message.bodySize += len; + body += data.toString('binary', offset, offset + len); + } + }; + parser.onComplete = function() { + messages.push(message); + message = {}; + }; + + var l = expected.raw.length; + var chunk = 4024; + + for (var i = 0; i < l; i += chunk) { + var toread = Math.min(l - i, chunk); + ret = parser.execute( + new Buffer(expected.raw.slice(i, i + toread), 'binary') + ); + assert.strictEqual(ret, toread); + } + assert.strictEqual(parser.finish(), undefined); + + assert.strictEqual(messages.length, 1); + assertMessageEquals(messages[0], expected, ['body', 'upgrade']); + assert.strictEqual(messages[0].bodySize || 0, expectedBodySize); +}); + + +// Perform scan tests on some responses +console.log('response scan 1/2 '); +testScan(getMessageByName('200 trailing space on chunked body'), + getMessageByName('HTTP/1.0 with keep-alive and a 204 status'), + getMessageByName('301 no response phrase')); +console.log('response scan 2/2 '); +testScan(getMessageByName('no merge with empty value'), + getMessageByName('underscore header key'), + { + name: 'ycombinator headers', + type: RESPONSE, + raw: [ + 'HTTP/1.1 200 OK', + 'Content-Type: text/html; charset=utf-8', + 'Connection: close', + '', + 'these headers are from http://news.ycombinator.com/' + ].join(CRLF), + shouldKeepAlive: false, + msgCompleteOnEOF: true, + httpMajor: 1, + httpMinor: 1, + method: null, + url: null, + statusCode: 200, + statusText: 'OK', + headers: [ + 'Content-Type', + 'text/html; charset=utf-8', + 'Connection', + 'close', + ], + body: 'these headers are from 
http://news.ycombinator.com/' + }); +console.log('responses okay'); + + + + +// Test malformed HTTP version in request +(function() { + var parser = new HTTPParser(REQUEST); + var input = 'GET / HTP/1.1\r\n\r\n'; + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + assert.strictEqual(typeof ret !== 'number', true); + assert.strictEqual(/Malformed request line/i.test(ret.message), true); +})(); + +// Test well-formed but incomplete request +(function() { + var parser = new HTTPParser(REQUEST); + var input = 'GET / HTTP/1.1\r\nContent-Type: text/plain\r\n' + + 'Content-Length: 6\r\n\r\nfooba'; + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + assert.strictEqual(ret, input.length); +})(); + +// Test illegal header field name line folding in request +(function() { + var parser = new HTTPParser(REQUEST); + var input = 'GET / HTTP/1.1\r\nname\r\n : value\r\n\r\n'; + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + assert.strictEqual(typeof ret !== 'number', true); + assert.strictEqual(/Malformed header line/i.test(ret.message), true); +})(); + +// Test large SSL certificate header value in request +(function() { + var parser = new HTTPParser(REQUEST); + var input = + 'GET / HTTP/1.1\r\n' + + 'X-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n' + + '\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n' + + '\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n' + + '\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n' + + '\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n' + + '\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n' + + '\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n' + + 
'\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n' + + '\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n' + + '\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n' + + '\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n' + + '\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n' + + '\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n' + + '\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n' + + '\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n' + + '\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n' + + '\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n' + + '\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n' + + '\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n' + + '\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n' + + '\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n' + + '\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBD' + + 'AWLmh0\r\n' + + '\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n' + + '\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n' + + '\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n' + + '\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n' + + '\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n' + + '\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n' + + '\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n' + + '\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n' + + '\tRA==\r\n' + + '\t-----END CERTIFICATE-----\r\n' + + '\r\n'; + var ret; + + parser.onHeaders = parser.onBody = parser.onComplete = function() {}; + ret = parser.execute(new Buffer(input)); + assert.strictEqual(ret, 
input.length); +})(); + + +// Test pipelined requests +(function() { + for (var i = 0; i <= requestsEnd; ++i) { + if (!cases[i].shouldKeepAlive || cases[i].error !== undefined) + continue; + for (var j = 0; j <= requestsEnd; ++j) { + if (!cases[j].shouldKeepAlive || cases[j].error !== undefined) + continue; + for (var k = 0; k <= requestsEnd; ++k) { + if (cases[k].error !== undefined) + continue; + testMultiple3(cases[i], cases[j], cases[k]); + } + } + } +})(); + + +// Perform scan tests on some requests +console.log('request scan 1/4 '); +testScan(getMessageByName('get no headers no body'), + getMessageByName('get one header no body'), + getMessageByName('get no headers no body')); +console.log('request scan 2/4 '); +testScan(getMessageByName( + 'post - chunked body: all your base are belong to us' + ), + getMessageByName('post identity body world'), + getMessageByName('get funky content length body hello')); +console.log('request scan 3/4 '); +testScan(getMessageByName('two chunks ; triple zero ending'), + getMessageByName('chunked with trailing headers'), + getMessageByName('chunked with chunk extensions')); +console.log('request scan 4/4 '); +testScan(getMessageByName('query url with question mark'), + getMessageByName('newline prefix get'), + getMessageByName('connect request')); +console.log('requests okay'); + + + + +// HELPER FUNCTIONS ============================================================ + +// SCAN through every possible breaking to make sure the parser can handle +// getting the content in any chunks that might come from the socket +function testScan(case1, case2, case3) { + var messageCount = countParsedMessages(case1, case2, case3); + var total = case1.raw + case2.raw + case3.raw; + var totallen = total.length; + var totalops = (totallen - 1) * (totallen - 2) / 2; + var messages = []; + var message = {}; + var ops = 0; + var nb = 0; + var hasUpgrade; + var ret; + + function onHeaders(versionMajor, versionMinor, headers, method, url, + statusCode, 
statusText, upgrade, shouldKeepAlive) { + message = { + type: (method === null && url === null ? RESPONSE : REQUEST), + shouldKeepAlive: shouldKeepAlive, + //msgCompleteOnEOF + httpMajor: versionMajor, + httpMinor: versionMinor, + method: method, + url: url, + headers: headers, + statusCode: statusCode, + statusText: statusText, + upgrade: upgrade + }; + } + function onBody(data, offset, len) { + if (!message.body) + message.body = data.toString('binary', offset, offset + len); + else + message.body += data.toString('binary', offset, offset + len); + } + function onComplete() { + if (parser.headers.length > 0) { + if (message.headers) + message.headers = message.headers.concat(parser.headers); + else + message.headers = parser.headers; + } + messages.push(message); + message = {}; + } + + for (var j = 2; j < totallen; ++j) { + for (var i = 1; i < j; ++i) { + if (ops % 1000 === 0) { + var value = Math.floor(100 * ops / totalops); + if (value < 10) + value = ' ' + value; + else if (value < 100) + value = ' ' + value; + else + value = '' + value; + console.log('\b\b\b\b%s%', value); + } + ++ops; + + var parser = new HTTPParser(case1.type); + parser.onHeaders = onHeaders; + parser.onBody = onBody; + parser.onComplete = onComplete; + + messages = []; + hasUpgrade = false; + nb = 0; + + ret = parser.execute(new Buffer(total.slice(0, i), 'binary')); + assert.strictEqual(typeof ret === 'number', true); + nb += ret; + + for (var k = 0; k < messages.length; ++k) { + if (messages[k].upgrade === true) + hasUpgrade = true; + else + delete messages[k].upgrade; + } + + if (!hasUpgrade) { + assert.strictEqual(nb, i); + + ret = parser.execute(new Buffer(total.slice(i, j), 'binary')); + assert.strictEqual(typeof ret === 'number', true); + nb += ret; + + for (var k = 0; k < messages.length; ++k) { + if (messages[k].upgrade === true) + hasUpgrade = true; + else + delete messages[k].upgrade; + } + + if (!hasUpgrade) { + assert.strictEqual(nb, i + (j - i)); + + ret = parser.execute(new 
Buffer(total.slice(j), 'binary')); + assert.strictEqual(typeof ret === 'number', true); + nb += ret; + + for (var k = 0; k < messages.length; ++k) { + if (messages[k].upgrade === true) + hasUpgrade = true; + else + delete messages[k].upgrade; + } + + if (!hasUpgrade) + assert.strictEqual(nb, i + (j - i) + (totallen - j)); + } + } + + assert.strictEqual(parser.finish(), undefined); + assert.strictEqual(messages.length, messageCount); + + for (var k = 0; k < messages.length; ++k) { + if (messages[k].upgrade !== true) + delete messages[k].upgrade; + } + + if (hasUpgrade) { + var lastMessage = messages.slice(-1)[0]; + upgradeMessageFix(total, nb, lastMessage, case1, case2, case3); + } + + assertMessageEquals(messages[0], case1); + if (messages.length > 1) + assertMessageEquals(messages[1], case2); + if (messages.length > 2) + assertMessageEquals(messages[2], case3); + } + } + console.log('\b\b\b\b100%'); +} + +function testMultiple3(case1, case2, case3) { + var messageCount = countParsedMessages(case1, case2, case3); + var total = case1.raw + case2.raw + case3.raw; + var parser = new HTTPParser(case1.type); + var messages = []; + var message = {}; + var ret; + + parser.onHeaders = function(versionMajor, versionMinor, headers, method, url, + statusCode, statusText, upgrade, + shouldKeepAlive) { + message = { + type: (method === null && url === null ? 
RESPONSE : REQUEST),
      shouldKeepAlive: shouldKeepAlive,
      //msgCompleteOnEOF
      httpMajor: versionMajor,
      httpMinor: versionMinor,
      method: method,
      url: url,
      headers: headers,
      statusCode: statusCode,
      statusText: statusText,
      upgrade: upgrade
    };
  };
  parser.onBody = function(data, offset, len) {
    // Accumulate body chunks as a binary string.
    if (!message.body)
      message.body = data.toString('binary', offset, offset + len);
    else
      message.body += data.toString('binary', offset, offset + len);
  };
  parser.onComplete = function() {
    // Trailing headers (e.g. chunked trailers) are exposed on the parser
    // itself; merge them with the headers captured in onHeaders.
    if (parser.headers.length > 0) {
      if (message.headers)
        message.headers = message.headers.concat(parser.headers);
      else
        message.headers = parser.headers;
    }
    messages.push(message);
    message = {};
  };

  ret = parser.execute(new Buffer(total, 'binary'));

  assert.strictEqual(parser.finish(), undefined);
  assert.strictEqual(messages.length, messageCount);

  var hasUpgrade = false;
  for (var i = 0; i < messages.length; ++i) {
    if (messages[i].upgrade === true)
      hasUpgrade = true;
    else
      delete messages[i].upgrade;
  }

  if (hasUpgrade) {
    var lastMessage = messages.slice(-1)[0];
    upgradeMessageFix(total, ret, lastMessage, case1, case2, case3);
  } else {
    // With no upgrade, the parser must have consumed every byte of input.
    assert.strictEqual(ret, total.length);
  }

  assertMessageEquals(messages[0], case1);
  if (messages.length > 1)
    assertMessageEquals(messages[1], case2);
  if (messages.length > 2)
    assertMessageEquals(messages[2], case3);
}

// Verifies the bytes remaining after an upgrade and patches
// `actualLast.upgrade` so assertMessageEquals() can compare it with the
// expected case. Arguments 3..N are the test cases whose raw text was
// concatenated into `body`; `ret` is the byte offset where parsing stopped.
function upgradeMessageFix(body, ret, actualLast) {
  var offset = 0;

  for (var i = 3; i < arguments.length; ++i) {
    var caseMsg = arguments[i];

    offset += caseMsg.raw.length;

    if (caseMsg.upgrade !== undefined) {
      offset -= caseMsg.upgrade.length;

      // Check the portion of the response after its specified upgrade
      assert.strictEqual(body.slice(offset), body.slice(ret));

      // Fix up the response so that assertMessageEquals() will verify the
      // upgrade correctly
      actualLast.upgrade = body.slice(ret, ret + caseMsg.upgrade.length);
      return;
    }
  }

  throw new Error('Expected a message with an upgrade');
}

// Parsing stops at the first case that contains an upgrade, so only the
// cases up to and including it produce parsed messages.
function countParsedMessages() {
  for (var i = 0; i < arguments.length; ++i) {
    if (arguments[i].upgrade) {
      return i + 1;
    }
  }
  return arguments.length;
}

// Builds, as a binary string, a chunked-encoded message: `rawHeaders`
// followed by `bodySizeKB` 1KB chunks of 'C' bytes and the terminating
// zero-length chunk.
function createLargeChunkedMessage(bodySizeKB, rawHeaders) {
  var wrote = 0;
  var headerslen = rawHeaders.length;
  // Per chunk: 5-byte size line ('400\r\n') + 1024 data bytes + CRLF,
  // plus 5 bytes for the final '0\r\n\r\n'.
  var bufsize = headerslen + (5 + 1024 + 2) * bodySizeKB + 5;
  var buf = new Buffer(bufsize);

  buf.write(rawHeaders, wrote, headerslen, 'binary');
  wrote += headerslen;

  for (var i = 0; i < bodySizeKB; ++i) {
    // Write 1KB chunk into the body.
    buf.write('400\r\n', wrote, 5);
    wrote += 5;
    buf.fill('C', wrote, wrote + 1024);
    wrote += 1024;
    buf.write('\r\n', wrote, 2);
    wrote += 2;
  }

  buf.write('0\r\n\r\n', wrote, 5);
  wrote += 5;
  assert.strictEqual(wrote, bufsize);

  return buf.toString('binary');
}

// Case-insensitive lookup of a predefined test case by its `name` property.
function getMessageByName(name) {
  var lowered = name.toLowerCase();
  for (var i = 0; i < cases.length; ++i) {
    if (cases[i].name.toLowerCase() === lowered)
      return cases[i];
  }
  throw new Error('Predefined HTTP message not found for: ' + name);
}

// Compares `actual` and `expected` on every message property except those
// named in `except`; headers use a deep comparison, everything else strict.
function assertMessageEquals(actual, expected, except) {
  ['type', 'httpMajor', 'httpMinor', 'method', 'url', 'statusCode',
   'statusText', 'shouldKeepAlive', 'headers', 'upgrade', 'body'
  ].filter(function(p) {
    return (except === undefined || except.indexOf(p) === -1);
  }).forEach(function(type) {
    var assertFn = (type === 'headers' ? assert.deepEqual : assert.strictEqual);
    assertFn(actual[type],
             expected[type],
             type + ' mismatch for: ' + expected.name + '\nActual:\n' +
             inspect(actual[type]) + '\nExpected:\n' +
             inspect(expected[type]) + '\n');
  });
}