diff --git a/CHANGES/7700.bugfix b/CHANGES/7700.bugfix new file mode 100644 index 00000000000..26fdfa9076b --- /dev/null +++ b/CHANGES/7700.bugfix @@ -0,0 +1 @@ +Fix issue with insufficient HTTP method and version validation. diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index cb3a8ef2a09..fa0e8909a73 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -1,4 +1,3 @@ -# type: ignore # Tests for aiohttp/protocol.py import asyncio @@ -19,7 +18,6 @@ HttpPayloadParser, HttpRequestParserPy, HttpResponseParserPy, - HttpVersion, ) try: @@ -31,8 +29,8 @@ brotli = None -REQUEST_PARSERS: Any = [HttpRequestParserPy] -RESPONSE_PARSERS: Any = [HttpResponseParserPy] +REQUEST_PARSERS = [HttpRequestParserPy] +RESPONSE_PARSERS = [HttpResponseParserPy] try: from aiohttp.http_parser import HttpRequestParserC, HttpResponseParserC @@ -63,6 +61,7 @@ def parser(loop: Any, protocol: Any, request: Any): loop, 2**16, max_line_size=8190, + max_headers=32768, max_field_size=8190, ) @@ -81,6 +80,7 @@ def response(loop: Any, protocol: Any, request: Any): loop, 2**16, max_line_size=8190, + max_headers=32768, max_field_size=8190, ) @@ -96,7 +96,7 @@ def stream(): return mock.Mock() -@pytest.mark.skipif(NO_EXTENSIONS, reason="Extensions available but not imported") +@pytest.mark.skipif(NO_EXTENSIONS, reason="Extentions available but not imported") def test_c_parser_loaded(): assert "HttpRequestParserC" in dir(aiohttp.http_parser) assert "HttpResponseParserC" in dir(aiohttp.http_parser) @@ -178,7 +178,6 @@ def test_cve_2023_37276(parser: Any) -> None: "Baz: abc\x00def", "Foo : bar", # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 "Foo\t: bar", - "\xffoo: bar", ), ) def test_bad_headers(parser: Any, hdr: str) -> None: @@ -187,23 +186,6 @@ def test_bad_headers(parser: Any, hdr: str) -> None: parser.feed_data(text) -def test_unpaired_surrogate_in_header_py(loop: Any, protocol: Any) -> None: - parser = HttpRequestParserPy( - protocol, - loop, - 2**16, - max_line_size=8190, - max_field_size=8190, - ) - text = b"POST / HTTP/1.1\r\n\xff\r\n\r\n" - message = None - try: - parser.feed_data(text) - except http_exceptions.InvalidHeader as e: - message = e.message.encode("utf-8") - assert message is not None - - def test_content_length_transfer_encoding(parser: Any) -> None: text = ( b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\nTransfer-Encoding: a\r\n\r\n" @@ -265,7 +247,7 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) -def test_parse(parser: Any) -> None: +def test_parse(parser) -> None: text = b"GET /test HTTP/1.1\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -277,7 +259,7 @@ def test_parse(parser: Any) -> None: assert msg.version == (1, 1) -async def test_parse_body(parser: Any) -> None: +async def test_parse_body(parser) -> None: text = b"GET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -286,7 +268,7 @@ async def test_parse_body(parser: Any) -> None: assert body == b"body" -async def test_parse_body_with_CRLF(parser: Any) -> None: +async def test_parse_body_with_CRLF(parser) -> None: text = b"\r\nGET /test HTTP/1.1\r\nContent-Length: 4\r\n\r\nbody" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -295,7 +277,7 @@ async def test_parse_body_with_CRLF(parser: Any) -> None: assert body == b"body" -def test_parse_delayed(parser: Any) -> None: +def test_parse_delayed(parser) -> None: text = 
b"GET /test HTTP/1.1\r\n" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 0 @@ -307,7 +289,7 @@ def test_parse_delayed(parser: Any) -> None: assert msg.method == "GET" -def test_headers_multi_feed(parser: Any) -> None: +def test_headers_multi_feed(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" text2 = b"test: line\r" text3 = b"\n continue\r\n\r\n" @@ -329,7 +311,7 @@ def test_headers_multi_feed(parser: Any) -> None: assert not msg.upgrade -def test_headers_split_field(parser: Any) -> None: +def test_headers_split_field(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" text2 = b"t" text3 = b"es" @@ -350,7 +332,7 @@ def test_headers_split_field(parser: Any) -> None: assert not msg.upgrade -def test_parse_headers_multi(parser: Any) -> None: +def test_parse_headers_multi(parser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"Set-Cookie: c1=cookie1\r\n" @@ -373,63 +355,63 @@ def test_parse_headers_multi(parser: Any) -> None: assert msg.compression is None -def test_conn_default_1_0(parser: Any) -> None: +def test_conn_default_1_0(parser) -> None: text = b"GET /test HTTP/1.0\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close -def test_conn_default_1_1(parser: Any) -> None: +def test_conn_default_1_1(parser) -> None: text = b"GET /test HTTP/1.1\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close -def test_conn_close(parser: Any) -> None: +def test_conn_close(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"connection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close -def test_conn_close_1_0(parser: Any) -> None: +def test_conn_close_1_0(parser) -> None: text = b"GET /test HTTP/1.0\r\n" b"connection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close -def test_conn_keep_alive_1_0(parser: Any) -> None: +def test_conn_keep_alive_1_0(parser) -> None: text = b"GET /test HTTP/1.0\r\n" b"connection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close -def test_conn_keep_alive_1_1(parser: Any) -> None: +def test_conn_keep_alive_1_1(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"connection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close -def test_conn_other_1_0(parser: Any) -> None: +def test_conn_other_1_0(parser) -> None: text = b"GET /test HTTP/1.0\r\n" b"connection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close -def test_conn_other_1_1(parser: Any) -> None: +def test_conn_other_1_1(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"connection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close -def test_request_chunked(parser: Any) -> None: +def test_request_chunked(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] @@ -473,30 +455,21 @@ def test_conn_upgrade(parser: Any) -> None: assert upgrade -def test_bad_upgrade(parser: Any) -> None: - """Test not upgraded if missing Upgrade header.""" - text = b"GET /test HTTP/1.1\r\nconnection: upgrade\r\n\r\n" - messages, upgrade, tail = parser.feed_data(text) - msg = messages[0][0] - assert not msg.upgrade - assert not upgrade - - -def 
test_compression_empty(parser: Any) -> None: +def test_compression_empty(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: \r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None -def test_compression_deflate(parser: Any) -> None: +def test_compression_deflate(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: deflate\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "deflate" -def test_compression_gzip(parser: Any) -> None: +def test_compression_gzip(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: gzip\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -504,14 +477,14 @@ def test_compression_gzip(parser: Any) -> None: @pytest.mark.skipif(brotli is None, reason="brotli is not installed") -def test_compression_brotli(parser: Any) -> None: +def test_compression_brotli(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: br\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "br" -def test_compression_unknown(parser: Any) -> None: +def test_compression_unknown(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: compress\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -553,27 +526,27 @@ def test_headers_old_websocket_key1(parser: Any) -> None: parser.feed_data(text) -def test_headers_content_length_err_1(parser: Any) -> None: +def test_headers_content_length_err_1(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-length: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) -def test_headers_content_length_err_2(parser: Any) -> None: +def test_headers_content_length_err_2(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-length: -1\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) -def test_invalid_header(parser: Any) -> None: +def test_invalid_header(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"test line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) -def test_invalid_name(parser: Any) -> None: +def test_invalid_name(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"test[]: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): @@ -581,7 +554,7 @@ def test_invalid_name(parser: Any) -> None: @pytest.mark.parametrize("size", [40960, 8191]) -def test_max_header_field_size(parser: Any, size: Any) -> None: +def test_max_header_field_size(parser, size) -> None: name = b"t" * size text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n" @@ -590,7 +563,7 @@ def test_max_header_field_size(parser: Any, size: Any) -> None: parser.feed_data(text) -def test_max_header_field_size_under_limit(parser: Any) -> None: +def test_max_header_field_size_under_limit(parser) -> None: name = b"t" * 8190 text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n" @@ -609,7 +582,7 @@ def test_max_header_field_size_under_limit(parser: Any) -> None: @pytest.mark.parametrize("size", [40960, 8191]) -def test_max_header_value_size(parser: Any, size: Any) -> None: +def test_max_header_value_size(parser, size) -> None: name = b"t" * size text = b"GET /test HTTP/1.1\r\n" b"data:" + name + b"\r\n\r\n" @@ -618,7 +591,7 @@ def test_max_header_value_size(parser: Any, size: Any) -> None: parser.feed_data(text) -def test_max_header_value_size_under_limit(parser: Any) -> 
None: +def test_max_header_value_size_under_limit(parser) -> None: value = b"A" * 8190 text = b"GET /test HTTP/1.1\r\n" b"data:" + value + b"\r\n\r\n" @@ -637,7 +610,7 @@ def test_max_header_value_size_under_limit(parser: Any) -> None: @pytest.mark.parametrize("size", [40965, 8191]) -def test_max_header_value_size_continuation(parser: Any, size: Any) -> None: +def test_max_header_value_size_continuation(parser, size) -> None: name = b"T" * (size - 5) text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n" @@ -646,7 +619,7 @@ def test_max_header_value_size_continuation(parser: Any, size: Any) -> None: parser.feed_data(text) -def test_max_header_value_size_continuation_under_limit(parser: Any) -> None: +def test_max_header_value_size_continuation_under_limit(parser) -> None: value = b"A" * 8185 text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + value + b"\r\n\r\n" @@ -664,7 +637,7 @@ def test_max_header_value_size_continuation_under_limit(parser: Any) -> None: assert msg.url == URL("/test") -def test_http_request_parser(parser: Any) -> None: +def test_http_request_parser(parser) -> None: text = b"GET /path HTTP/1.1\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -681,7 +654,7 @@ def test_http_request_parser(parser: Any) -> None: assert msg.url == URL("/path") -def test_http_request_bad_status_line(parser: Any) -> None: +def test_http_request_bad_status_line(parser) -> None: text = b"getpath \r\n\r\n" with pytest.raises(http_exceptions.BadStatusLine) as exc_info: parser.feed_data(text) @@ -689,13 +662,7 @@ def test_http_request_bad_status_line(parser: Any) -> None: assert r"\n" not in exc_info.value.message -def test_http_request_bad_status_line_whitespace(parser: Any) -> None: - text = b"GET\n/path\fHTTP/1.1\r\n\r\n" - with pytest.raises(http_exceptions.BadStatusLine): - parser.feed_data(text) - - -def test_http_request_upgrade(parser: Any) -> None: +def test_http_request_upgrade(parser) -> None: text = ( b"GET /test HTTP/1.1\r\n" b"connection: upgrade\r\n" @@ -710,7 +677,7 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" -def test_http_request_parser_utf8(parser: Any) -> None: +def test_http_request_parser_utf8(parser) -> None: text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode() messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -727,7 +694,7 @@ def test_http_request_parser_utf8(parser: Any) -> None: assert msg.url == URL("/path") -def test_http_request_parser_non_utf8(parser: Any) -> None: +def test_http_request_parser_non_utf8(parser) -> None: text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode("cp1251") msg = parser.feed_data(text)[0][0][0] @@ -745,7 +712,7 @@ def test_http_request_parser_non_utf8(parser: Any) -> None: assert msg.url == URL("/path") -def test_http_request_parser_two_slashes(parser: Any) -> None: +def test_http_request_parser_two_slashes(parser) -> None: text = b"GET //path HTTP/1.1\r\n\r\n" msg = parser.feed_data(text)[0][0][0] @@ -759,12 +726,12 @@ def test_http_request_parser_two_slashes(parser: Any) -> None: assert not msg.chunked -def test_http_request_parser_bad_method(parser: Any) -> None: +def test_http_request_parser_bad_method(parser) -> None: with pytest.raises(http_exceptions.BadStatusLine): parser.feed_data(b'G=":<>(e),[T];?" 
/get HTTP/1.1\r\n\r\n') -def test_http_request_parser_bad_version(parser: Any) -> None: +def test_http_request_parser_bad_version(parser) -> None: with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(b"GET //get HT/11\r\n\r\n") @@ -774,25 +741,15 @@ def test_http_request_parser_bad_version_number(parser: Any) -> None: parser.feed_data(b"GET /test HTTP/1.32\r\n\r\n") -def test_http_request_parser_bad_ascii_uri(parser: Any) -> None: - with pytest.raises(http_exceptions.InvalidURLError): - parser.feed_data(b"GET ! HTTP/1.1\r\n\r\n") - - -def test_http_request_parser_bad_nonascii_uri(parser: Any) -> None: - with pytest.raises(http_exceptions.InvalidURLError): - parser.feed_data(b"GET \xff HTTP/1.1\r\n\r\n") - - @pytest.mark.parametrize("size", [40965, 8191]) -def test_http_request_max_status_line(parser: Any, size: Any) -> None: +def test_http_request_max_status_line(parser, size) -> None: path = b"t" * (size - 5) match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): parser.feed_data(b"GET /path" + path + b" HTTP/1.1\r\n\r\n") -def test_http_request_max_status_line_under_limit(parser: Any) -> None: +def test_http_request_max_status_line_under_limit(parser) -> None: path = b"t" * (8190 - 5) messages, upgraded, tail = parser.feed_data( b"GET /path" + path + b" HTTP/1.1\r\n\r\n" @@ -811,7 +768,7 @@ def test_http_request_max_status_line_under_limit(parser: Any) -> None: assert msg.url == URL("/path" + path.decode()) -def test_http_response_parser_utf8(response: Any) -> None: +def test_http_response_parser_utf8(response) -> None: text = "HTTP/1.1 200 Ok\r\nx-test:тест\r\n\r\n".encode() messages, upgraded, tail = response.feed_data(text) @@ -827,33 +784,15 @@ def test_http_response_parser_utf8(response: Any) -> None: assert not tail -def test_http_response_parser_utf8_without_reason(response: Any) -> None: - text = "HTTP/1.1 200 \r\nx-test:тест\r\n\r\n".encode() - - messages, upgraded, tail = response.feed_data(text) - assert len(messages) == 1 - msg = messages[0][0] - - assert msg.version == (1, 1) - assert msg.code == 200 - assert msg.reason == "" - assert msg.headers == CIMultiDict([("X-TEST", "тест")]) - assert msg.raw_headers == ((b"x-test", "тест".encode()),) - assert not upgraded - assert not tail - - @pytest.mark.parametrize("size", [40962, 8191]) -def test_http_response_parser_bad_status_line_too_long( - response: Any, size: Any -) -> None: +def test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): response.feed_data(b"HTTP/1.1 200 Ok" + reason + b"\r\n\r\n") -def test_http_response_parser_status_line_under_limit(response: Any) -> None: +def test_http_response_parser_status_line_under_limit(response) -> None: reason = b"O" * 8190 messages, upgraded, tail = response.feed_data( b"HTTP/1.1 200 " + reason + b"\r\n\r\n" @@ -864,17 +803,17 @@ def test_http_response_parser_status_line_under_limit(response: Any) -> None: assert msg.reason == reason.decode() -def test_http_response_parser_bad_version(response: Any) -> None: +def test_http_response_parser_bad_version(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HT/11 200 Ok\r\n\r\n") -def test_http_response_parser_bad_version_number(response: Any) -> None: +def test_http_response_parser_bad_version_number(response) -> 
None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTTP/12.3 200 Ok\r\n\r\n") -def test_http_response_parser_no_reason(response: Any) -> None: +def test_http_response_parser_no_reason(response) -> None: msg = response.feed_data(b"HTTP/1.1 200\r\n\r\n")[0][0][0] assert msg.version == (1, 1) @@ -882,7 +821,7 @@ def test_http_response_parser_no_reason(response: Any) -> None: assert msg.reason == "" -def test_http_response_parser_lenient_headers(response: Any) -> None: +def test_http_response_parser_lenient_headers(response) -> None: messages, upgrade, tail = response.feed_data( b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n" ) @@ -892,14 +831,14 @@ def test_http_response_parser_lenient_headers(response: Any) -> None: @pytest.mark.dev_mode -def test_http_response_parser_strict_headers(response: Any) -> None: +def test_http_response_parser_strict_headers(response) -> None: if isinstance(response, HttpResponseParserPy): pytest.xfail("Py parser is lenient. May update py-parser later.") with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n") -def test_http_response_parser_bad_crlf(response: Any) -> None: +def test_http_response_parser_bad_crlf(response) -> None: """Still a lot of dodgy servers sending bad requests like this.""" messages, upgrade, tail = response.feed_data( b"HTTP/1.0 200 OK\nFoo: abc\nBar: def\n\nBODY\n" @@ -910,7 +849,7 @@ def test_http_response_parser_bad_crlf(response: Any) -> None: assert msg.headers["Bar"] == "def" -async def test_http_response_parser_bad_chunked_lax(response: Any) -> None: +async def test_http_response_parser_bad_chunked_lax(response) -> None: text = ( b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n" ) @@ -920,9 +859,7 @@ async def test_http_response_parser_bad_chunked_lax(response: Any) -> None: @pytest.mark.dev_mode -async def test_http_response_parser_bad_chunked_strict_py( - loop: Any, protocol: Any -) -> None: +async def test_http_response_parser_bad_chunked_strict_py(loop, protocol) -> None: response = HttpResponseParserPy( protocol, loop, @@ -942,9 +879,7 @@ async def test_http_response_parser_bad_chunked_strict_py( "HttpRequestParserC" not in dir(aiohttp.http_parser), reason="C based HTTP parser not available", ) -async def test_http_response_parser_bad_chunked_strict_c( - loop: Any, protocol: Any -) -> None: +async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None: response = HttpResponseParserC( protocol, loop, @@ -959,27 +894,27 @@ async def test_http_response_parser_bad_chunked_strict_c( response.feed_data(text) -def test_http_response_parser_bad(response: Any) -> None: +def test_http_response_parser_bad(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTT/1\r\n\r\n") -def test_http_response_parser_code_under_100(response: Any) -> None: +def test_http_response_parser_code_under_100(response) -> None: with pytest.raises(http_exceptions.BadStatusLine): response.feed_data(b"HTTP/1.1 99 test\r\n\r\n") -def test_http_response_parser_code_above_999(response: Any) -> None: +def test_http_response_parser_code_above_999(response) -> None: with pytest.raises(http_exceptions.BadStatusLine): response.feed_data(b"HTTP/1.1 9999 test\r\n\r\n") -def test_http_response_parser_code_not_int(response: Any) -> None: +def test_http_response_parser_code_not_int(response) -> None: with pytest.raises(http_exceptions.BadStatusLine): response.feed_data(b"HTTP/1.1 ttt test\r\n\r\n") 
-def test_http_request_chunked_payload(parser: Any) -> None: +def test_http_request_chunked_payload(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -994,7 +929,7 @@ def test_http_request_chunked_payload(parser: Any) -> None: assert payload.is_eof() -def test_http_request_chunked_payload_and_next_message(parser: Any) -> None: +def test_http_request_chunked_payload_and_next_message(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1016,7 +951,7 @@ def test_http_request_chunked_payload_and_next_message(parser: Any) -> None: assert not payload2.is_eof() -def test_http_request_chunked_payload_chunks(parser: Any) -> None: +def test_http_request_chunked_payload_chunks(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1038,7 +973,7 @@ def test_http_request_chunked_payload_chunks(parser: Any) -> None: assert payload.is_eof() -def test_parse_chunked_payload_chunk_extension(parser: Any) -> None: +def test_parse_chunked_payload_chunk_extension(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1057,9 +992,7 @@ def _test_parse_no_length_or_te_on_post(loop, protocol, request_cls): assert payload.is_eof() -def test_parse_payload_response_without_body( - loop: Any, protocol: Any, response_cls: Any -) -> None: +def test_parse_payload_response_without_body(loop, protocol, response_cls) -> None: parser = response_cls(protocol, loop, 2**16, response_with_body=False) text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 10\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] @@ -1067,7 +1000,7 @@ def test_parse_payload_response_without_body( assert payload.is_eof() -def test_parse_length_payload(response: Any) -> None: +def test_parse_length_payload(response) -> None: text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 4\r\n\r\n" msg, payload = response.feed_data(text)[0][0] assert not payload.is_eof() @@ -1080,138 +1013,13 @@ def test_parse_length_payload(response: Any) -> None: assert b"data" == b"".join(d for d in payload._buffer) -def test_parse_no_length_payload(parser: Any) -> None: +def test_parse_no_length_payload(parser) -> None: text = b"PUT / HTTP/1.1\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() -def test_parse_content_length_payload_multiple(response: Any) -> None: - text = b"HTTP/1.1 200 OK\r\ncontent-length: 5\r\n\r\nfirst" - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == 200 - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Content-Length", "5"), - ] - ) - assert msg.raw_headers == ((b"content-length", b"5"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - assert not msg.chunked - assert payload.is_eof() - assert b"first" == b"".join(d for d in payload._buffer) - - text = b"HTTP/1.1 200 OK\r\ncontent-length: 6\r\n\r\nsecond" - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == 200 - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Content-Length", "6"), - ] - ) - assert msg.raw_headers == ((b"content-length", b"6"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - 
assert not msg.chunked - assert payload.is_eof() - assert b"second" == b"".join(d for d in payload._buffer) - - -def test_parse_content_length_than_chunked_payload(response: Any) -> None: - text = b"HTTP/1.1 200 OK\r\ncontent-length: 5\r\n\r\nfirst" - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == 200 - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Content-Length", "5"), - ] - ) - assert msg.raw_headers == ((b"content-length", b"5"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - assert not msg.chunked - assert payload.is_eof() - assert b"first" == b"".join(d for d in payload._buffer) - - text = ( - b"HTTP/1.1 200 OK\r\n" - b"transfer-encoding: chunked\r\n\r\n" - b"6\r\nsecond\r\n0\r\n\r\n" - ) - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == 200 - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Transfer-Encoding", "chunked"), - ] - ) - assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - assert msg.chunked - assert payload.is_eof() - assert b"second" == b"".join(d for d in payload._buffer) - - -@pytest.mark.parametrize("code", (204, 304, 101, 102)) -def test_parse_chunked_payload_empty_body_than_another_chunked( - response: Any, code: int -) -> None: - head = f"HTTP/1.1 {code} OK\r\n".encode() - text = head + b"transfer-encoding: chunked\r\n\r\n" - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == code - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Transfer-Encoding", "chunked"), - ] - ) - assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - assert msg.chunked - assert payload.is_eof() - - text = ( - b"HTTP/1.1 200 OK\r\n" - b"transfer-encoding: chunked\r\n\r\n" - b"6\r\nsecond\r\n0\r\n\r\n" - ) - msg, payload = response.feed_data(text)[0][0] - assert msg.version == HttpVersion(major=1, minor=1) - assert msg.code == 200 - assert msg.reason == "OK" - assert msg.headers == CIMultiDict( - [ - ("Transfer-Encoding", "chunked"), - ] - ) - assert msg.raw_headers == ((b"transfer-encoding", b"chunked"),) - assert not msg.should_close - assert msg.compression is None - assert not msg.upgrade - assert msg.chunked - assert payload.is_eof() - assert b"second" == b"".join(d for d in payload._buffer) - - -def test_partial_url(parser: Any) -> None: +def test_partial_url(parser) -> None: messages, upgrade, tail = parser.feed_data(b"GET /te") assert len(messages) == 0 messages, upgrade, tail = parser.feed_data(b"st HTTP/1.1\r\n\r\n") @@ -1236,9 +1044,7 @@ def test_partial_url(parser: Any) -> None: ("/path#frag%2520", "/path", {}, "frag%20"), ], ) -def test_parse_uri_percent_encoded( - parser: Any, uri: Any, path: Any, query: Any, fragment: Any -) -> None: +def test_parse_uri_percent_encoded(parser, uri, path, query, fragment) -> None: text = (f"GET {uri} HTTP/1.1\r\n\r\n").encode() messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -1250,7 +1056,7 @@ def test_parse_uri_percent_encoded( assert msg.url.fragment == fragment -def test_parse_uri_utf8(parser: Any) -> None: +def test_parse_uri_utf8(parser) -> None: if not isinstance(parser, HttpRequestParserPy): 
pytest.xfail("Not valid HTTP. Maybe update py-parser to reject later.") text = ("GET /путь?ключ=знач#фраг HTTP/1.1\r\n\r\n").encode() @@ -1263,7 +1069,7 @@ def test_parse_uri_utf8(parser: Any) -> None: assert msg.url.fragment == "фраг" -def test_parse_uri_utf8_percent_encoded(parser: Any) -> None: +def test_parse_uri_utf8_percent_encoded(parser) -> None: text = ( "GET %s HTTP/1.1\r\n\r\n" % quote("/путь?ключ=знач#фраг", safe="/?=#") ).encode() @@ -1281,13 +1087,14 @@ def test_parse_uri_utf8_percent_encoded(parser: Any) -> None: "HttpRequestParserC" not in dir(aiohttp.http_parser), reason="C based HTTP parser not available", ) -def test_parse_bad_method_for_c_parser_raises(loop: Any, protocol: Any) -> None: +def test_parse_bad_method_for_c_parser_raises(loop, protocol): payload = b"GET1 /test HTTP/1.1\r\n\r\n" parser = HttpRequestParserC( protocol, loop, 2**16, max_line_size=8190, + max_headers=32768, max_field_size=8190, ) @@ -1296,7 +1103,7 @@ def test_parse_bad_method_for_c_parser_raises(loop: Any, protocol: Any) -> None: class TestParsePayload: - async def test_parse_eof_payload(self, stream: Any) -> None: + async def test_parse_eof_payload(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1307,7 +1114,7 @@ async def test_parse_eof_payload(self, stream: Any) -> None: assert out.is_eof() assert [(bytearray(b"data"), 4)] == list(out._buffer) - async def test_parse_no_body(self, stream: Any) -> None: + async def test_parse_no_body(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1316,7 +1123,7 @@ async def test_parse_no_body(self, stream: Any) -> None: assert out.is_eof() assert p.done - async def test_parse_length_payload_eof(self, stream: Any) -> None: + async def test_parse_length_payload_eof(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1327,7 +1134,7 @@ async def test_parse_length_payload_eof(self, stream: Any) -> None: with pytest.raises(http_exceptions.ContentLengthError): p.feed_eof() - async def test_parse_chunked_payload_size_error(self, stream: Any) -> None: + async def test_parse_chunked_payload_size_error(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1336,7 +1143,7 @@ async def test_parse_chunked_payload_size_error(self, stream: Any) -> None: p.feed_data(b"blah\r\n") assert isinstance(out.exception(), http_exceptions.TransferEncodingError) - async def test_parse_chunked_payload_split_end(self, protocol: Any) -> None: + async def test_parse_chunked_payload_split_end(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") @@ -1345,7 +1152,7 @@ async def test_parse_chunked_payload_split_end(self, protocol: Any) -> None: assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end2(self, protocol: Any) -> None: + async def test_parse_chunked_payload_split_end2(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n\r") @@ -1354,9 +1161,7 @@ async def test_parse_chunked_payload_split_end2(self, protocol: Any) -> None: assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers( - self, protocol: Any - ) -> None: + async def 
test_parse_chunked_payload_split_end_trailers(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") @@ -1366,9 +1171,7 @@ async def test_parse_chunked_payload_split_end_trailers( assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers2( - self, protocol: Any - ) -> None: + async def test_parse_chunked_payload_split_end_trailers2(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n") @@ -1378,9 +1181,7 @@ async def test_parse_chunked_payload_split_end_trailers2( assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers3( - self, protocol: Any - ) -> None: + async def test_parse_chunked_payload_split_end_trailers3(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ") @@ -1389,9 +1190,7 @@ async def test_parse_chunked_payload_split_end_trailers3( assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_parse_chunked_payload_split_end_trailers4( - self, protocol: Any - ) -> None: + async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) p.feed_data(b"4\r\nasdf\r\n0\r\n" b"C") @@ -1400,7 +1199,7 @@ async def test_parse_chunked_payload_split_end_trailers4( assert out.is_eof() assert b"asdf" == b"".join(out._buffer) - async def test_http_payload_parser_length(self, stream: Any) -> None: + async def test_http_payload_parser_length(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1411,7 +1210,7 @@ async def test_http_payload_parser_length(self, stream: Any) -> None: assert b"12" == b"".join(d for d, _ in out._buffer) assert b"45" == tail - async def test_http_payload_parser_deflate(self, stream: Any) -> None: + async def test_http_payload_parser_deflate(self, stream) -> None: # c=compressobj(wbits=15); b''.join([c.compress(b'data'), c.flush()]) COMPRESSED = b"x\x9cKI,I\x04\x00\x04\x00\x01\x9b" @@ -1438,7 +1237,7 @@ async def test_http_payload_parser_deflate_no_hdrs(self, stream: Any) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) assert out.is_eof() - async def test_http_payload_parser_deflate_light(self, stream: Any) -> None: + async def test_http_payload_parser_deflate_light(self, stream) -> None: # c=compressobj(wbits=9); b''.join([c.compress(b'data'), c.flush()]) COMPRESSED = b"\x18\x95KI,I\x04\x00\x04\x00\x01\x9b" @@ -1451,7 +1250,7 @@ async def test_http_payload_parser_deflate_light(self, stream: Any) -> None: assert b"data" == b"".join(d for d, _ in out._buffer) assert out.is_eof() - async def test_http_payload_parser_deflate_split(self, stream: Any) -> None: + async def test_http_payload_parser_deflate_split(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1463,7 +1262,7 @@ async def test_http_payload_parser_deflate_split(self, stream: Any) -> None: p.feed_eof() assert b"data" == b"".join(d for d, _ in out._buffer) - async def test_http_payload_parser_deflate_split_err(self, stream: Any) -> None: + async def test_http_payload_parser_deflate_split_err(self, stream) -> None: out = 
aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1475,7 +1274,7 @@ async def test_http_payload_parser_deflate_split_err(self, stream: Any) -> None: p.feed_eof() assert b"data" == b"".join(d for d, _ in out._buffer) - async def test_http_payload_parser_length_zero(self, stream: Any) -> None: + async def test_http_payload_parser_length_zero(self, stream) -> None: out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1484,7 +1283,7 @@ async def test_http_payload_parser_length_zero(self, stream: Any) -> None: assert out.is_eof() @pytest.mark.skipif(brotli is None, reason="brotli is not installed") - async def test_http_payload_brotli(self, stream: Any) -> None: + async def test_http_payload_brotli(self, stream) -> None: compressed = brotli.compress(b"brotli data") out = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() @@ -1496,7 +1295,7 @@ async def test_http_payload_brotli(self, stream: Any) -> None: class TestDeflateBuffer: - async def test_feed_data(self, stream: Any) -> None: + async def test_feed_data(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1509,7 +1308,7 @@ async def test_feed_data(self, stream: Any) -> None: dbuf.feed_data(b"xxxx", 4) assert [b"line"] == list(d for d, _ in buf._buffer) - async def test_feed_data_err(self, stream: Any) -> None: + async def test_feed_data_err(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1524,7 +1323,7 @@ async def test_feed_data_err(self, stream: Any) -> None: # Should start with b'x', otherwise code switch mocked decoder. dbuf.feed_data(b"xsomedata", 9) - async def test_feed_eof(self, stream: Any) -> None: + async def test_feed_eof(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1537,7 +1336,7 @@ async def test_feed_eof(self, stream: Any) -> None: assert [b"line"] == list(d for d, _ in buf._buffer) assert buf._eof - async def test_feed_eof_err_deflate(self, stream: Any) -> None: + async def test_feed_eof_err_deflate(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1550,7 +1349,7 @@ async def test_feed_eof_err_deflate(self, stream: Any) -> None: with pytest.raises(http_exceptions.ContentEncodingError): dbuf.feed_eof() - async def test_feed_eof_no_err_gzip(self, stream: Any) -> None: + async def test_feed_eof_no_err_gzip(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1563,7 +1362,7 @@ async def test_feed_eof_no_err_gzip(self, stream: Any) -> None: dbuf.feed_eof() assert [b"line"] == list(d for d, _ in buf._buffer) - async def test_feed_eof_no_err_brotli(self, stream: Any) -> None: + async def test_feed_eof_no_err_brotli(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() ) @@ -1576,7 +1375,7 @@ async def test_feed_eof_no_err_brotli(self, stream: Any) -> None: dbuf.feed_eof() assert [b"line"] == list(d for d, _ in buf._buffer) - async def test_empty_body(self, stream: Any) -> None: + async def test_empty_body(self, stream) -> None: buf = aiohttp.FlowControlDataQueue( stream, 2**16, loop=asyncio.get_event_loop() )
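
For readers skimming the diff, the changelog entry above ("Fix issue with insufficient HTTP method and version validation.") maps onto the request-line checks exercised in this file, such as test_http_request_parser_bad_method and test_http_request_parser_bad_version. Below is a minimal, self-contained sketch of that behaviour. It mirrors the parser construction used by the fixtures in this diff (positional protocol/loop/limit plus max_line_size, max_headers, max_field_size), but the throwaway event loop and the mocked protocol are assumptions made only so the snippet can run outside the test suite's own fixtures; it is a sketch, not part of the patch.

import asyncio
from unittest import mock

import pytest

from aiohttp import http_exceptions
from aiohttp.http_parser import HttpRequestParserPy


def make_parser() -> HttpRequestParserPy:
    # Assumed wiring: a fresh event loop and a mocked protocol stand in for the
    # loop/protocol fixtures used by the real tests in this file.
    loop = asyncio.new_event_loop()
    protocol = mock.Mock()
    return HttpRequestParserPy(
        protocol,
        loop,
        2**16,
        max_line_size=8190,
        max_headers=32768,
        max_field_size=8190,
    )


def test_rejects_bad_method_token() -> None:
    # The method contains characters that are not valid HTTP token characters,
    # so the parser is expected to reject the request line.
    with pytest.raises(http_exceptions.BadStatusLine):
        make_parser().feed_data(b'G=":<>(e),[T];?" /get HTTP/1.1\r\n\r\n')


def test_rejects_malformed_version() -> None:
    # "HT/11" is not a valid HTTP-version token, so parsing should fail.
    with pytest.raises(http_exceptions.BadHttpMessage):
        make_parser().feed_data(b"GET //get HT/11\r\n\r\n")

The two payloads and the expected exception types are taken directly from the tests shown in this diff; only the fixture replacement around them is assumed.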