cli: add --max-http-header-size flag
Allow the maximum size of HTTP headers to be overridden from
the command line.

Co-authored-by: Matteo Collina <hello@matteocollina.com>
PR-URL: #24811
Fixes: #24692
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Reviewed-By: Michael Dawson <michael_dawson@ca.ibm.com>
Reviewed-By: Сковорода Никита Андреевич <chalkerx@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Jeremiah Senkpiel <fishrock123@rocketmail.com>
2 people authored and MylesBorins committed Dec 21, 2018
1 parent c80ac7f commit edd8bd0
Showing 7 changed files with 184 additions and 28 deletions.
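
A minimal usage sketch, separate from the diff below: the flag name and its 8KB default come from this commit, while the 16384 value and the server.js filename are placeholders for illustration.

    // Sketch: start a child Node.js process with a larger header limit.
    // '--max-http-header-size' is the flag added by this commit; 16384 and
    // 'server.js' are placeholder values.
    const { spawn } = require('child_process');

    const child = spawn(process.execPath,
                        ['--max-http-header-size=16384', 'server.js'],
                        { stdio: 'inherit' });

    child.on('close', (code) => {
      console.log('server exited with code', code);
    });
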
8 changes: 8 additions & 0 deletions doc/api/cli.md
@@ -197,6 +197,13 @@ added: v9.0.0

Specify the `file` of the custom [experimental ECMAScript Module][] loader.

### `--max-http-header-size=size`
<!-- YAML
added: REPLACEME
-->

Specify the maximum size, in bytes, of HTTP headers. Defaults to 8KB.

### `--napi-modules`
<!-- YAML
added: v7.10.0
@@ -604,6 +611,7 @@ Node.js options that are allowed are:
- `--inspect-brk`
- `--inspect-port`
- `--loader`
- `--max-http-header-size`
- `--napi-modules`
- `--no-deprecation`
- `--no-force-async-hooks-checks`
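
Since the cli.md diff above also adds the flag to the list of options permitted in `NODE_OPTIONS`, the limit can be set through the environment as well. A short sketch modelled on the new test-set-http-max-http-headers.js; the value and the server.js filename are illustrative:

    // Sketch: pass the new flag via NODE_OPTIONS instead of the command line.
    const { spawn } = require('child_process');

    const env = Object.assign({}, process.env, {
      NODE_OPTIONS: '--max-http-header-size=16384'  // illustrative value
    });

    spawn(process.execPath, ['server.js'], { env, stdio: 'inherit' });
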
3 changes: 3 additions & 0 deletions doc/node.1
@@ -139,6 +139,9 @@ Specify the
as a custom loader, to load
.Fl -experimental-modules .
.
.It Fl -max-http-header-size Ns = Ns Ar size
Specify the maximum size of HTTP headers in bytes. Defaults to 8KB.
.
.It Fl -napi-modules
This option is a no-op.
It is kept for compatibility.
18 changes: 14 additions & 4 deletions src/node_http_parser_impl.h
@@ -830,7 +830,7 @@ class Parser : public AsyncWrap, public StreamListener {
int TrackHeader(size_t len) {
#ifdef NODE_EXPERIMENTAL_HTTP
header_nread_ += len;
if (header_nread_ >= kMaxHeaderSize) {
if (header_nread_ >= per_process_opts->max_http_header_size) {
llhttp_set_error_reason(&parser_, "HPE_HEADER_OVERFLOW:Header overflow");
return HPE_USER;
}
@@ -892,9 +892,6 @@ class Parser : public AsyncWrap, public StreamListener {
typedef int (Parser::*DataCall)(const char* at, size_t length);

static const parser_settings_t settings;
#ifdef NODE_EXPERIMENTAL_HTTP
static const uint64_t kMaxHeaderSize = 8 * 1024;
#endif /* NODE_EXPERIMENTAL_HTTP */
};

const parser_settings_t Parser::settings = {
@@ -916,6 +913,14 @@ const parser_settings_t Parser::settings = {
};


#ifndef NODE_EXPERIMENTAL_HTTP
void InitMaxHttpHeaderSizeOnce() {
const uint32_t max_http_header_size = per_process_opts->max_http_header_size;
http_parser_set_max_header_size(max_http_header_size);
}
#endif /* NODE_EXPERIMENTAL_HTTP */


void InitializeHttpParser(Local<Object> target,
Local<Value> unused,
Local<Context> context,
@@ -965,6 +970,11 @@ void InitializeHttpParser(Local<Object> target,
target->Set(env->context(),
FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"),
t->GetFunction(env->context()).ToLocalChecked()).FromJust();

#ifndef NODE_EXPERIMENTAL_HTTP
static uv_once_t init_once = UV_ONCE_INIT;
uv_once(&init_once, InitMaxHttpHeaderSizeOnce);
#endif /* NODE_EXPERIMENTAL_HTTP */
}

} // anonymous namespace
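
From JavaScript, the parser change above surfaces as an HPE_HEADER_OVERFLOW error on the server's 'clientError' event once a request's headers exceed the configured limit, which is what the updated test below exercises. A minimal sketch of that behaviour; the 64 KB of padding is only an example chosen to exceed the 8KB default:

    // Sketch: trigger the header-overflow check from a plain TCP client.
    const http = require('http');
    const net = require('net');

    const server = http.createServer(() => {});

    server.on('clientError', (err, socket) => {
      // err.code is 'HPE_HEADER_OVERFLOW' once the headers cross the limit.
      console.log('client error:', err.code);
      socket.destroy();
    });

    server.listen(0, () => {
      const socket = net.connect(server.address().port);
      socket.write('GET / HTTP/1.1\r\nHost: localhost\r\n' +
                   `X-Padding: ${'a'.repeat(64 * 1024)}\r\n\r\n`);
      socket.resume();
      socket.on('close', () => server.close());
    });
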
4 changes: 4 additions & 0 deletions src/node_options.cc
@@ -252,6 +252,10 @@ PerProcessOptionsParser::PerProcessOptionsParser() {
kAllowedInEnvironment);
AddAlias("--trace-events-enabled", {
"--trace-event-categories", "v8,node,node.async_hooks" });
AddOption("--max-http-header-size",
"set the maximum size of HTTP headers (default: 8KB)",
&PerProcessOptions::max_http_header_size,
kAllowedInEnvironment);
AddOption("--v8-pool-size",
"set V8's thread pool size",
&PerProcessOptions::v8_thread_pool_size,
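
Because the option is registered here, internal JavaScript can read it back through getOptionValue, which is exactly what the updated test does when it logs the limit. A sketch, assuming the script is run with --expose-internals; check.js is a placeholder name:

    // Sketch: read the configured limit from the internal options binding.
    // Run as: node --expose-internals --max-http-header-size=16384 check.js
    const { getOptionValue } = require('internal/options');

    console.log('max header size is', getOptionValue('--max-http-header-size'));
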
1 change: 1 addition & 0 deletions src/node_options.h
@@ -151,6 +151,7 @@ class PerProcessOptions : public Options {
std::string title;
std::string trace_event_categories;
std::string trace_event_file_pattern = "node_trace.${rotation}.log";
uint64_t max_http_header_size = 8 * 1024;
int64_t v8_thread_pool_size = 4;
bool zero_fill_all_buffers = false;

69 changes: 45 additions & 24 deletions test/sequential/test-http-max-http-headers.js
@@ -4,10 +4,14 @@ const common = require('../common');
const assert = require('assert');
const http = require('http');
const net = require('net');
const MAX = 8 * 1024; // 8KB
const MAX = +(process.argv[2] || 8 * 1024); // Command line option, or 8KB.

const { getOptionValue } = require('internal/options');

console.log('pid is', process.pid);
console.log('max header size is', getOptionValue('--max-http-header-size'));
console.log('current http parser is', getOptionValue('--http-parser'));

// Verify that we cannot receive more than 8KB of headers.

function once(cb) {
@@ -38,19 +42,15 @@ function fillHeaders(headers, currentSize, valid = false) {

// Generate valid headers
if (valid) {
// TODO(mcollina): understand why -9 is needed instead of -1
headers = headers.slice(0, -9);
// TODO(mcollina): understand why -32 is needed instead of -1
headers = headers.slice(0, -32);
}
return headers + '\r\n\r\n';
}

const timeout = common.platformTimeout(10);

function writeHeaders(socket, headers) {
const array = [];

// this is off from 1024 so that \r\n does not get split
const chunkSize = 1000;
const chunkSize = 100;
let last = 0;

for (let i = 0; i < headers.length / chunkSize; i++) {
@@ -65,19 +65,25 @@ function writeHeaders(socket, headers) {
next();

function next() {
if (socket.write(array.shift())) {
if (array.length === 0) {
socket.end();
} else {
setTimeout(next, timeout);
}
if (socket.destroyed) {
console.log('socket was destroyed early, data left to write:',
array.join('').length);
return;
}

const chunk = array.shift();

if (chunk) {
console.log('writing chunk of size', chunk.length);
socket.write(chunk, next);
} else {
socket.once('drain', next);
socket.end();
}
}
}

function test1() {
console.log('test1');
let headers =
'HTTP/1.1 200 OK\r\n' +
'Content-Length: 0\r\n' +
@@ -92,6 +98,9 @@ function test1() {
writeHeaders(sock, headers);
sock.resume();
});

// The socket might error but that's ok
sock.on('error', () => {});
});

server.listen(0, common.mustCall(() => {
@@ -100,17 +109,17 @@

client.on('error', common.mustCall((err) => {
assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
server.close();
setImmediate(test2);
server.close(test2);
}));
}));
}

const test2 = common.mustCall(() => {
console.log('test2');
let headers =
'GET / HTTP/1.1\r\n' +
'Host: localhost\r\n' +
'Agent: node\r\n' +
'Agent: nod2\r\n' +
'X-CRASH: ';

// /, Host, localhost, Agent, node, X-CRASH, a...
@@ -119,7 +128,7 @@ const test2 = common.mustCall(() => {

const server = http.createServer(common.mustNotCall());

server.on('clientError', common.mustCall((err) => {
server.once('clientError', common.mustCall((err) => {
assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
}));

@@ -131,34 +140,46 @@ const test3 = common.mustCall(() => {
});

finished(client, common.mustCall((err) => {
server.close();
setImmediate(test3);
server.close(test3);
}));
}));
});

const test3 = common.mustCall(() => {
console.log('test3');
let headers =
'GET / HTTP/1.1\r\n' +
'Host: localhost\r\n' +
'Agent: node\r\n' +
'Agent: nod3\r\n' +
'X-CRASH: ';

// /, Host, localhost, Agent, node, X-CRASH, a...
const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
headers = fillHeaders(headers, currentSize, true);

console.log('writing', headers.length);

const server = http.createServer(common.mustCall((req, res) => {
res.end('hello world');
setImmediate(server.close.bind(server));
res.end('hello from test3 server');
server.close();
}));

server.on('clientError', (err) => {
console.log(err.code);
if (err.code === 'HPE_HEADER_OVERFLOW') {
console.log(err.rawPacket.toString('hex'));
}
});
server.on('clientError', common.mustNotCall());

server.listen(0, common.mustCall(() => {
const client = net.connect(server.address().port);
client.on('connect', () => {
writeHeaders(client, headers);
client.resume();
});

client.pipe(process.stdout);
}));
});

109 changes: 109 additions & 0 deletions test/sequential/test-set-http-max-http-headers.js
@@ -0,0 +1,109 @@
'use strict';

const common = require('../common');
const assert = require('assert');
const { spawn } = require('child_process');
const path = require('path');
const testName = path.join(__dirname, 'test-http-max-http-headers.js');
const parsers = ['legacy', 'llhttp'];

const timeout = common.platformTimeout(100);

const tests = [];

function test(fn) {
tests.push(fn);
}

parsers.forEach((parser) => {
test(function(cb) {
console.log('running subtest expecting failure');

// Validate that the test fails if the max header size is too small.
const args = ['--expose-internals',
`--http-parser=${parser}`,
'--max-http-header-size=1024',
testName];
const cp = spawn(process.execPath, args, { stdio: 'inherit' });

cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 1);
assert.strictEqual(signal, null);
cb();
}));
});

test(function(cb) {
console.log('running subtest expecting success');

const env = Object.assign({}, process.env, {
NODE_DEBUG: 'http'
});

// Validate that the test now passes if the same limit becomes large enough.
const args = ['--expose-internals',
`--http-parser=${parser}`,
'--max-http-header-size=1024',
testName,
'1024'];
const cp = spawn(process.execPath, args, {
env,
stdio: 'inherit'
});

cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
cb();
}));
});

// Next, repeat the same checks using NODE_OPTIONS if it is supported.
if (!process.config.variables.node_without_node_options) {
const env = Object.assign({}, process.env, {
NODE_OPTIONS: `--http-parser=${parser} --max-http-header-size=1024`
});

test(function(cb) {
console.log('running subtest expecting failure');

// Validate that the test fails if the max header size is too small.
const args = ['--expose-internals', testName];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });

cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 1);
assert.strictEqual(signal, null);
cb();
}));
});

test(function(cb) {
// Validate that the test now passes if the same limit
// becomes large enough.
const args = ['--expose-internals', testName, '1024'];
const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });

cp.on('close', common.mustCall((code, signal) => {
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
cb();
}));
});
}
});

function runTest() {
const fn = tests.shift();

if (!fn) {
return;
}

fn(() => {
setTimeout(runTest, timeout);
});
}

runTest();
