From 68940b9b045fcd675276234ac734c4befc9f67dd Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Thu, 12 Apr 2018 16:14:03 +0300 Subject: [PATCH 001/144] [3.1] fix resolve cancellation (#2910) (#2931) * fix resolve cancellation * fixes based on review * changes based on review * add changes file * rename (cherry picked from commit a7bbaad) Co-authored-by: Alexander Mohr --- CHANGES/2910.bugfix | 1 + aiohttp/connector.py | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 CHANGES/2910.bugfix diff --git a/CHANGES/2910.bugfix b/CHANGES/2910.bugfix new file mode 100644 index 00000000000..e10a8534d06 --- /dev/null +++ b/CHANGES/2910.bugfix @@ -0,0 +1 @@ +fix cancellation broadcast during DNS resolve diff --git a/aiohttp/connector.py b/aiohttp/connector.py index f506230ad42..556d91fdce0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -700,10 +700,7 @@ async def _resolve_host(self, host, port, traces=None): await trace.send_dns_resolvehost_start(host) addrs = await \ - asyncio.shield(self._resolver.resolve(host, - port, - family=self._family), - loop=self._loop) + self._resolver.resolve(host, port, family=self._family) if traces: for trace in traces: await trace.send_dns_resolvehost_end(host) @@ -813,10 +810,13 @@ async def _create_direct_connection(self, req, fingerprint = self._get_fingerprint(req) try: - hosts = await self._resolve_host( + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
+ hosts = await asyncio.shield(self._resolve_host( req.url.raw_host, req.port, - traces=traces) + traces=traces), loop=self._loop) except OSError as exc: # in case of proxy it is not ClientProxyConnectionError # it is problem of resolving proxy ip itself From a901b54a5beaec472624f089eb1085a1073a1cf4 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:05:49 +0300 Subject: [PATCH 002/144] Bump to 3.1.3 --- CHANGES.rst | 6 ++++++ CHANGES/2910.bugfix | 1 - aiohttp/__init__.py | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) delete mode 100644 CHANGES/2910.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index 443b6a2671f..85fb7dff3d5 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -14,6 +14,12 @@ Changelog .. towncrier release notes start +3.1.3 (2018-04-12) +================== + +- Fix cancellation broadcast during DNS resolve (#2910) + + 3.1.2 (2018-04-05) ================== diff --git a/CHANGES/2910.bugfix b/CHANGES/2910.bugfix deleted file mode 100644 index e10a8534d06..00000000000 --- a/CHANGES/2910.bugfix +++ /dev/null @@ -1 +0,0 @@ -fix cancellation broadcast during DNS resolve diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 04b35fe26ef..d4087fbd816 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = '3.1.2' +__version__ = '3.1.3' # This relies on each of the submodules having an __all__ variable. 
From ffd704be3389afe88840da1533b86cfde1d9e066 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:11:35 +0300 Subject: [PATCH 003/144] Fix links --- CHANGES.rst | 54 +-- HISTORY.rst | 1210 +++++++++++++++++++++++++-------------------------- 2 files changed, 632 insertions(+), 632 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 85fb7dff3d5..02689ac2a1b 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -17,14 +17,14 @@ Changelog 3.1.3 (2018-04-12) ================== -- Fix cancellation broadcast during DNS resolve (#2910) +- Fix cancellation broadcast during DNS resolve (`#2910 `_) 3.1.2 (2018-04-05) ================== -- Make ``LineTooLong`` exception more detailed about actual data size (#2863) -- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (#2909) +- Make ``LineTooLong`` exception more detailed about actual data size (`#2863 `_) +- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (`#2909 `_) 3.1.1 (2018-03-27) @@ -32,7 +32,7 @@ Changelog - Support *asynchronous iterators* (and *asynchronous generators* as well) in both client and server API as request / response BODY - payloads. (#2802) + payloads. (`#2802 `_) 3.1.0 (2018-03-21) @@ -77,48 +77,48 @@ Features -------- - Relax JSON content-type checking in the ``ClientResponse.json()`` to allow - "application/xxx+json" instead of strict "application/json". (#2206) -- Bump C HTTP parser to version 2.8 (#2730) + "application/xxx+json" instead of strict "application/json". (`#2206 `_) +- Bump C HTTP parser to version 2.8 (`#2730 `_) - Accept a coroutine as an application factory in ``web.run_app`` and gunicorn - worker. (#2739) -- Implement application cleanup context (``app.cleanup_ctx`` property). (#2747) -- Make ``writer.write_headers`` a coroutine. (#2762) -- Add tracking signals for getting request/response bodies. (#2767) + worker. (`#2739 `_) +- Implement application cleanup context (``app.cleanup_ctx`` property). 
(`#2747 `_) +- Make ``writer.write_headers`` a coroutine. (`#2762 `_) +- Add tracking signals for getting request/response bodies. (`#2767 `_) - Deprecate ClientResponseError.code in favor of .status to keep similarity - with response classes. (#2781) -- Implement ``app.add_routes()`` method. (#2787) -- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (#2795) + with response classes. (`#2781 `_) +- Implement ``app.add_routes()`` method. (`#2787 `_) +- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (`#2795 `_) - Install a test event loop as default by ``asyncio.set_event_loop()``. The change affects aiohttp test utils but backward compatibility is not broken - for 99.99% of use cases. (#2804) + for 99.99% of use cases. (`#2804 `_) - Refactor ``ClientResponse`` constructor: make logically required constructor - arguments mandatory, drop ``_post_init()`` method. (#2820) -- Use ``app.add_routes()`` in server docs everywhere (#2830) + arguments mandatory, drop ``_post_init()`` method. (`#2820 `_) +- Use ``app.add_routes()`` in server docs everywhere (`#2830 `_) - Websockets refactoring, all websocket writer methods are converted into - coroutines. (#2836) -- Provide ``Content-Range`` header for ``Range`` requests (#2844) + coroutines. (`#2836 `_) +- Provide ``Content-Range`` header for ``Range`` requests (`#2844 `_) Bugfixes -------- -- Fix websocket client return EofStream. (#2784) -- Fix websocket demo. (#2789) +- Fix websocket client return EofStream. (`#2784 `_) +- Fix websocket demo. (`#2789 `_) - Property ``BaseRequest.http_range`` now returns a python-like slice when requesting the tail of the range. It's now indicated by a negative value in - ``range.start`` rather then in ``range.stop`` (#2805) + ``range.start`` rather then in ``range.stop`` (`#2805 `_) - Close a connection if an unexpected exception occurs while sending a request - (#2827) -- Fix firing DNS tracing events. (#2841) + (`#2827 `_) +- Fix firing DNS tracing events. 
(`#2841 `_) Improved Documentation ---------------------- - Change ``ClientResponse.json()`` documentation to reflect that it now - allows "application/xxx+json" content-types (#2206) + allows "application/xxx+json" content-types (`#2206 `_) - Document behavior when cchardet detects encodings that are unknown to Python. - (#2732) -- Add diagrams for tracing request life style. (#2748) + (`#2732 `_) +- Add diagrams for tracing request life style. (`#2748 `_) - Drop removed functionality for passing ``StreamReader`` as data at client - side. (#2793) + side. (`#2793 `_) diff --git a/HISTORY.rst b/HISTORY.rst index 567f5ca02f2..328e1ac1744 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,41 +2,41 @@ ================== - Close a connection if an unexpected exception occurs while sending a request - (#2827) + (`#2827 `_) 3.0.8 (2018-03-12) ================== -- Use ``asyncio.current_task()`` on Python 3.7 (#2825) +- Use ``asyncio.current_task()`` on Python 3.7 (`#2825 `_) 3.0.7 (2018-03-08) ================== -- Fix SSL proxy support by client. (#2810) +- Fix SSL proxy support by client. (`#2810 `_) - Restore a imperative check in ``setup.py`` for python version. The check works in parallel to environment marker. As effect a error about unsupported Python versions is raised even on outdated systems with very old - ``setuptools`` version installed. (#2813) + ``setuptools`` version installed. (`#2813 `_) 3.0.6 (2018-03-05) ================== - Add ``_reuse_address`` and ``_reuse_port`` to - ``web_runner.TCPSite.__slots__``. (#2792) + ``web_runner.TCPSite.__slots__``. (`#2792 `_) 3.0.5 (2018-02-27) ================== - Fix ``InvalidStateError`` on processing a sequence of two - ``RequestHandler.data_received`` calls on web server. (#2773) + ``RequestHandler.data_received`` calls on web server. (`#2773 `_) 3.0.4 (2018-02-26) ================== -- Fix ``IndexError`` in HTTP request handling by server. (#2752) -- Fix MultipartWriter.append* no longer returning part/payload. 
(#2759) +- Fix ``IndexError`` in HTTP request handling by server. (`#2752 `_) +- Fix MultipartWriter.append* no longer returning part/payload. (`#2759 `_) 3.0.3 (2018-02-25) @@ -67,120 +67,120 @@ Security Fix Features -------- -- Speed up the `PayloadWriter.write` method for large request bodies. (#2126) -- StreamResponse and Response are now MutableMappings. (#2246) +- Speed up the `PayloadWriter.write` method for large request bodies. (`#2126 `_) +- StreamResponse and Response are now MutableMappings. (`#2246 `_) - ClientSession publishes a set of signals to track the HTTP request execution. - (#2313) -- Content-Disposition fast access in ClientResponse (#2455) -- Added support to Flask-style decorators with class-based Views. (#2472) -- Signal handlers (registered callbacks) should be coroutines. (#2480) -- Support ``async with test_client.ws_connect(...)`` (#2525) + (`#2313 `_) +- Content-Disposition fast access in ClientResponse (`#2455 `_) +- Added support to Flask-style decorators with class-based Views. (`#2472 `_) +- Signal handlers (registered callbacks) should be coroutines. (`#2480 `_) +- Support ``async with test_client.ws_connect(...)`` (`#2525 `_) - Introduce *site* and *application runner* as underlying API for `web.run_app` - implementation. (#2530) -- Only quote multipart boundary when necessary and sanitize input (#2544) + implementation. (`#2530 `_) +- Only quote multipart boundary when necessary and sanitize input (`#2544 `_) - Make the `aiohttp.ClientResponse.get_encoding` method public with the - processing of invalid charset while detecting content encoding. (#2549) + processing of invalid charset while detecting content encoding. (`#2549 `_) - Add optional configurable per message compression for - `ClientWebSocketResponse` and `WebSocketResponse`. (#2551) + `ClientWebSocketResponse` and `WebSocketResponse`. (`#2551 `_) - Add hysteresis to `StreamReader` to prevent flipping between paused and - resumed states too often. 
(#2555) -- Support `.netrc` by `trust_env` (#2581) + resumed states too often. (`#2555 `_) +- Support `.netrc` by `trust_env` (`#2581 `_) - Avoid to create a new resource when adding a route with the same name and - path of the last added resource (#2586) -- `MultipartWriter.boundary` is `str` now. (#2589) + path of the last added resource (`#2586 `_) +- `MultipartWriter.boundary` is `str` now. (`#2589 `_) - Allow a custom port to be used by `TestServer` (and associated pytest - fixtures) (#2613) -- Add param access_log_class to web.run_app function (#2615) -- Add ``ssl`` parameter to client API (#2626) + fixtures) (`#2613 `_) +- Add param access_log_class to web.run_app function (`#2615 `_) +- Add ``ssl`` parameter to client API (`#2626 `_) - Fixes performance issue introduced by #2577. When there are no middlewares - installed by the user, no additional and useless code is executed. (#2629) -- Rename PayloadWriter to StreamWriter (#2654) + installed by the user, no additional and useless code is executed. (`#2629 `_) +- Rename PayloadWriter to StreamWriter (`#2654 `_) - New options *reuse_port*, *reuse_address* are added to `run_app` and - `TCPSite`. (#2679) -- Use custom classes to pass client signals parameters (#2686) -- Use ``attrs`` library for data classes, replace `namedtuple`. (#2690) -- Pytest fixtures renaming, add ``aiohttp_`` prefix (#2578) + `TCPSite`. (`#2679 `_) +- Use custom classes to pass client signals parameters (`#2686 `_) +- Use ``attrs`` library for data classes, replace `namedtuple`. (`#2690 `_) +- Pytest fixtures renaming, add ``aiohttp_`` prefix (`#2578 `_) - Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line - parameters (#2578) + parameters (`#2578 `_) Bugfixes -------- - Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not support HTTP2 yet, the protocol is not upgraded but response is handled - correctly. (#2277) + correctly. 
(`#2277 `_) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector (#2408) -- Fix connector convert OSError to ClientConnectorError (#2423) -- Fix connection attempts for multiple dns hosts (#2424) -- Fix writing to closed transport by raising `asyncio.CancelledError` (#2499) + connector (`#2408 `_) +- Fix connector convert OSError to ClientConnectorError (`#2423 `_) +- Fix connection attempts for multiple dns hosts (`#2424 `_) +- Fix writing to closed transport by raising `asyncio.CancelledError` (`#2499 `_) - Fix warning in `ClientSession.__del__` by stopping to try to close it. - (#2523) -- Fixed race-condition for iterating addresses from the DNSCache. (#2620) -- Fix default value of `access_log_format` argument in `web.run_app` (#2649) -- Freeze sub-application on adding to parent app (#2656) -- Do percent encoding for `.url_for()` parameters (#2668) + (`#2523 `_) +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 `_) +- Fix default value of `access_log_format` argument in `web.run_app` (`#2649 `_) +- Freeze sub-application on adding to parent app (`#2656 `_) +- Do percent encoding for `.url_for()` parameters (`#2668 `_) - Correctly process request start time and multiple request/response - headers in access log extra (#2641) + headers in access log extra (`#2641 `_) Improved Documentation ---------------------- - Improve tutorial docs, using `literalinclude` to link to the actual files. - (#2396) -- Small improvement docs: better example for file uploads. (#2401) -- Rename `from_env` to `trust_env` in client reference. (#2451) + (`#2396 `_) +- Small improvement docs: better example for file uploads. (`#2401 `_) +- Rename `from_env` to `trust_env` in client reference. 
(`#2451 `_) - Fixed mistype in `Proxy Support` section where `trust_env` parameter was used in `session.get("http://python.org", trust_env=True)` method instead of aiohttp.ClientSession constructor as follows: - `aiohttp.ClientSession(trust_env=True)`. (#2688) -- Fix issue with unittest example not compiling in testing docs. (#2717) + `aiohttp.ClientSession(trust_env=True)`. (`#2688 `_) +- Fix issue with unittest example not compiling in testing docs. (`#2717 `_) Deprecations and Removals ------------------------- -- Simplify HTTP pipelining implementation (#2109) -- Drop `StreamReaderPayload` and `DataQueuePayload`. (#2257) -- Drop `md5` and `sha1` finger-prints (#2267) -- Drop WSMessage.tp (#2321) +- Simplify HTTP pipelining implementation (`#2109 `_) +- Drop `StreamReaderPayload` and `DataQueuePayload`. (`#2257 `_) +- Drop `md5` and `sha1` finger-prints (`#2267 `_) +- Drop WSMessage.tp (`#2321 `_) - Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax. - (#2343) -- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (#2348) -- Drop `resolve` param from TCPConnector. (#2377) -- Add DeprecationWarning for returning HTTPException (#2415) + (`#2343 `_) +- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (`#2348 `_) +- Drop `resolve` param from TCPConnector. (`#2377 `_) +- Add DeprecationWarning for returning HTTPException (`#2415 `_) - `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are - genuine async functions now. (#2475) + genuine async functions now. (`#2475 `_) - Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal handlers should be coroutines, support for regular functions is dropped. - (#2480) + (`#2480 `_) - `StreamResponse.drain()` is not a part of public API anymore, just use `await StreamResponse.write()`. `StreamResponse.write` is converted to async - function. (#2483) + function. 
(`#2483 `_) - Drop deprecated `slow_request_timeout` param and `**kwargs`` from - `RequestHandler`. (#2500) -- Drop deprecated `resource.url()`. (#2501) -- Remove `%u` and `%l` format specifiers from access log format. (#2506) -- Drop deprecated `request.GET` property. (#2547) + `RequestHandler`. (`#2500 `_) +- Drop deprecated `resource.url()`. (`#2501 `_) +- Remove `%u` and `%l` format specifiers from access log format. (`#2506 `_) +- Drop deprecated `request.GET` property. (`#2547 `_) - Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`, merge `FlowControlStreamReader` functionality into `StreamReader`, drop - `FlowControlStreamReader` name. (#2555) + `FlowControlStreamReader` name. (`#2555 `_) - Do not create a new resource on `router.add_get(..., allow_head=True)` - (#2585) + (`#2585 `_) - Drop access to TCP tuning options from PayloadWriter and Response classes - (#2604) -- Drop deprecated `encoding` parameter from client API (#2606) + (`#2604 `_) +- Drop deprecated `encoding` parameter from client API (`#2606 `_) - Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in - client API (#2626) -- Get rid of the legacy class StreamWriter. (#2651) -- Forbid non-strings in `resource.url_for()` parameters. (#2668) + client API (`#2626 `_) +- Get rid of the legacy class StreamWriter. (`#2651 `_) +- Forbid non-strings in `resource.url_for()` parameters. (`#2668 `_) - Deprecate inheritance from ``ClientSession`` and ``web.Application`` and custom user attributes for ``ClientSession``, ``web.Request`` and - ``web.Application`` (#2691) + ``web.Application`` (`#2691 `_) - Drop `resp = await aiohttp.request(...)` syntax for sake of `async with - aiohttp.request(...) as resp:`. (#2540) + aiohttp.request(...) as resp:`. (`#2540 `_) - Forbid synchronous context managers for `ClientSession` and test - server/client. (#2362) + server/client. 
(`#2362 `_) Misc @@ -192,88 +192,88 @@ Misc 2.3.10 (2018-02-02) =================== -- Fix 100% CPU usage on HTTP GET and websocket connection just after it (#1955) +- Fix 100% CPU usage on HTTP GET and websocket connection just after it (`#1955 `_) -- Patch broken `ssl.match_hostname()` on Python<3.7 (#2674) +- Patch broken `ssl.match_hostname()` on Python<3.7 (`#2674 `_) 2.3.9 (2018-01-16) ================== -- Fix colon handing in path for dynamic resources (#2670) +- Fix colon handing in path for dynamic resources (`#2670 `_) 2.3.8 (2018-01-15) ================== - Do not use `yarl.unquote` internal function in aiohttp. Fix - incorrectly unquoted path part in URL dispatcher (#2662) + incorrectly unquoted path part in URL dispatcher (`#2662 `_) -- Fix compatibility with `yarl==1.0.0` (#2662) +- Fix compatibility with `yarl==1.0.0` (`#2662 `_) 2.3.7 (2017-12-27) ================== -- Fixed race-condition for iterating addresses from the DNSCache. (#2620) -- Fix docstring for request.host (#2591) -- Fix docstring for request.remote (#2592) +- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 `_) +- Fix docstring for request.host (`#2591 `_) +- Fix docstring for request.remote (`#2592 `_) 2.3.6 (2017-12-04) ================== -- Correct `request.app` context (for handlers not just middlewares). (#2577) +- Correct `request.app` context (for handlers not just middlewares). (`#2577 `_) 2.3.5 (2017-11-30) ================== -- Fix compatibility with `pytest` 3.3+ (#2565) +- Fix compatibility with `pytest` 3.3+ (`#2565 `_) 2.3.4 (2017-11-29) ================== - Make `request.app` point to proper application instance when using nested - applications (with middlewares). (#2550) + applications (with middlewares). (`#2550 `_) - Change base class of ClientConnectorSSLError to ClientSSLError from - ClientConnectorError. (#2563) + ClientConnectorError. (`#2563 `_) - Return client connection back to free pool on error in `connector.connect()`. 
- (#2567) + (`#2567 `_) 2.3.3 (2017-11-17) ================== - Having a `;` in Response content type does not assume it contains a charset - anymore. (#2197) + anymore. (`#2197 `_) - Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7. - (#2476) + (`#2476 `_) - Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`. - (#2491) + (`#2491 `_) - Fix warning in `ClientSession.__del__` by stopping to try to close it. - (#2523) + (`#2523 `_) - Fixed typo's in Third-party libraries page. And added async-v20 to the list - (#2510) + (`#2510 `_) 2.3.2 (2017-11-01) ================== -- Fix passing client max size on cloning request obj. (#2385) +- Fix passing client max size on cloning request obj. (`#2385 `_) - Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy - connector. (#2408) -- Drop generated `_http_parser` shared object from tarball distribution. (#2414) -- Fix connector convert OSError to ClientConnectorError. (#2423) -- Fix connection attempts for multiple dns hosts. (#2424) + connector. (`#2408 `_) +- Drop generated `_http_parser` shared object from tarball distribution. (`#2414 `_) +- Fix connector convert OSError to ClientConnectorError. (`#2423 `_) +- Fix connection attempts for multiple dns hosts. (`#2424 `_) - Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket to the - `aiohttp.web.run_app` function. (#2431) -- `_SessionRequestContextManager` closes the session properly now. (#2441) -- Rename `from_env` to `trust_env` in client reference. (#2451) + `aiohttp.web.run_app` function. (`#2431 `_) +- `_SessionRequestContextManager` closes the session properly now. (`#2441 `_) +- Rename `from_env` to `trust_env` in client reference. 
(`#2451 `_) 2.3.1 (2017-10-18) ================== -- Relax attribute lookup in warning about old-styled middleware (#2340) +- Relax attribute lookup in warning about old-styled middleware (`#2340 `_) 2.3.0 (2017-10-18) @@ -282,104 +282,104 @@ Misc Features -------- -- Add SSL related params to `ClientSession.request` (#1128) -- Make enable_compression work on HTTP/1.0 (#1828) -- Deprecate registering synchronous web handlers (#1993) +- Add SSL related params to `ClientSession.request` (`#1128 `_) +- Make enable_compression work on HTTP/1.0 (`#1828 `_) +- Deprecate registering synchronous web handlers (`#1993 `_) - Switch to `multidict 3.0`. All HTTP headers preserve casing now but compared - in case-insensitive way. (#1994) + in case-insensitive way. (`#1994 `_) - Improvement for `normalize_path_middleware`. Added possibility to handle URLs - with query string. (#1995) -- Use towncrier for CHANGES.txt build (#1997) -- Implement `trust_env=True` param in `ClientSession`. (#1998) -- Added variable to customize proxy headers (#2001) -- Implement `router.add_routes` and router decorators. (#2004) + with query string. (`#1995 `_) +- Use towncrier for CHANGES.txt build (`#1997 `_) +- Implement `trust_env=True` param in `ClientSession`. (`#1998 `_) +- Added variable to customize proxy headers (`#2001 `_) +- Implement `router.add_routes` and router decorators. 
(`#2004 `_) - Deprecated `BaseRequest.has_body` in favor of `BaseRequest.can_read_body` Added `BaseRequest.body_exists` - attribute that stays static for the lifetime of the request (#2005) -- Provide `BaseRequest.loop` attribute (#2024) + attribute that stays static for the lifetime of the request (`#2005 `_) +- Provide `BaseRequest.loop` attribute (`#2024 `_) - Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message - (#2026) + (`#2026 `_) - Responses to redirects without Location header are returned instead of - raising a RuntimeError (#2030) + raising a RuntimeError (`#2030 `_) - Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to - AioHTTPTestCase (#2032) -- Add automatically a SafeChildWatcher to the test loop (#2058) -- add ability to disable automatic response decompression (#2110) + AioHTTPTestCase (`#2032 `_) +- Add automatically a SafeChildWatcher to the test loop (`#2058 `_) +- add ability to disable automatic response decompression (`#2110 `_) - Add support for throttling DNS request, avoiding the requests saturation when there is a miss in the DNS cache and many requests getting into the connector - at the same time. (#2111) + at the same time. (`#2111 `_) - Use request for getting access log information instead of message/transport pair. Add `RequestBase.remote` property for accessing to IP of client - initiated HTTP request. (#2123) + initiated HTTP request. (`#2123 `_) - json() raises a ContentTypeError exception if the content-type does not meet - the requirements instead of raising a generic ClientResponseError. (#2136) + the requirements instead of raising a generic ClientResponseError. (`#2136 `_) - Make the HTTP client able to return HTTP chunks when chunked transfer - encoding is used. (#2150) + encoding is used. (`#2150 `_) - add `append_version` arg into `StaticResource.url` and `StaticResource.url_for` methods for getting an url with hash (version) of - the file. (#2157) + the file. 
(`#2157 `_) - Fix parsing the Forwarded header. * commas and semicolons are allowed inside quoted-strings; * empty forwarded-pairs (as in for=_1;;by=_2) are allowed; * non-standard parameters are allowed (although this alone could be easily done - in the previous parser). (#2173) + in the previous parser). (`#2173 `_) - Don't require ssl module to run. aiohttp does not require SSL to function. The code paths involved with SSL will only be hit upon SSL usage. Raise `RuntimeError` if HTTPS protocol is required but ssl module is not present. - (#2221) -- Accept coroutine fixtures in pytest plugin (#2223) -- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (#2227) -- Speed up Signals when there are no receivers (#2229) + (`#2221 `_) +- Accept coroutine fixtures in pytest plugin (`#2223 `_) +- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (`#2227 `_) +- Speed up Signals when there are no receivers (`#2229 `_) - Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL. - (#2241) -- Move `DummyCookieJar` into `cookiejar.py` (#2242) -- `run_app`: Make `print=None` disable printing (#2260) + (`#2241 `_) +- Move `DummyCookieJar` into `cookiejar.py` (`#2242 `_) +- `run_app`: Make `print=None` disable printing (`#2260 `_) - Support `brotli` encoding (generic-purpose lossless compression algorithm) - (#2270) + (`#2270 `_) - Add server support for WebSockets Per-Message Deflate. Add client option to add deflate compress header in WebSockets request header. If calling ClientSession.ws_connect() with `compress=15` the client will support deflate - compress negotiation. (#2273) + compress negotiation. (`#2273 `_) - Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by - `client.ws_connect`. (#2292) + `client.ws_connect`. 
(`#2292 `_) - Added `aiohttp.ClientConnectorSSLError` when connection fails due - `ssl.SSLError` (#2294) -- `aiohttp.web.Application.make_handler` support `access_log_class` (#2315) -- Build HTTP parser extension in non-strict mode by default. (#2332) + `ssl.SSLError` (`#2294 `_) +- `aiohttp.web.Application.make_handler` support `access_log_class` (`#2315 `_) +- Build HTTP parser extension in non-strict mode by default. (`#2332 `_) Bugfixes -------- -- Clear auth information on redirecting to other domain (#1699) -- Fix missing app.loop on startup hooks during tests (#2060) +- Clear auth information on redirecting to other domain (`#1699 `_) +- Fix missing app.loop on startup hooks during tests (`#2060 `_) - Fix issue with synchronous session closing when using `ClientSession` as an - asynchronous context manager. (#2063) + asynchronous context manager. (`#2063 `_) - Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases. - (#2084) + (`#2084 `_) - Force use of IPv4 during test, this will make tests run in a Docker container - (#2104) + (`#2104 `_) - Warnings about unawaited coroutines now correctly point to the user's code. - (#2106) + (`#2106 `_) - Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()` - generator. (#2112) -- Support HTTP 308 Permanent redirect in client class. (#2114) -- Fix `FileResponse` sending empty chunked body on 304. (#2143) + generator. (`#2112 `_) +- Support HTTP 308 Permanent redirect in client class. (`#2114 `_) +- Fix `FileResponse` sending empty chunked body on 304. (`#2143 `_) - Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default. - (#2167) -- Fix parsing the Forwarded header according to RFC 7239. (#2170) -- Securely determining remote/scheme/host #2171 (#2171) -- Fix header name parsing, if name is split into multiple lines (#2183) + (`#2167 `_) +- Fix parsing the Forwarded header according to RFC 7239. 
(`#2170 `_) +- Securely determining remote/scheme/host #2171 (`#2171 `_) +- Fix header name parsing, if name is split into multiple lines (`#2183 `_) - Handle session close during connection, `KeyError: - ` (#2193) + ` (`#2193 `_) - Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a - string (#2201) + string (`#2201 `_) - Raise OSError on async DNS lookup if resolved domain is an alias for another - one, which does not have an A or CNAME record. (#2231) -- Fix incorrect warning in `StreamReader`. (#2251) -- Properly clone state of web request (#2284) + one, which does not have an A or CNAME record. (`#2231 `_) +- Fix incorrect warning in `StreamReader`. (`#2251 `_) +- Properly clone state of web request (`#2284 `_) - Fix C HTTP parser for cases when status line is split into different TCP - packets. (#2311) -- Fix `web.FileResponse` overriding user supplied Content-Type (#2317) + packets. (`#2311 `_) +- Fix `web.FileResponse` overriding user supplied Content-Type (`#2317 `_) Improved Documentation @@ -387,32 +387,32 @@ Improved Documentation - Add a note about possible performance degradation in `await resp.text()` if charset was not provided by `Content-Type` HTTP header. Pass explicit - encoding to solve it. (#1811) -- Drop `disqus` widget from documentation pages. (#2018) -- Add a graceful shutdown section to the client usage documentation. (#2039) -- Document `connector_owner` parameter. (#2072) -- Update the doc of web.Application (#2081) -- Fix mistake about access log disabling. (#2085) + encoding to solve it. (`#1811 `_) +- Drop `disqus` widget from documentation pages. (`#2018 `_) +- Add a graceful shutdown section to the client usage documentation. (`#2039 `_) +- Document `connector_owner` parameter. (`#2072 `_) +- Update the doc of web.Application (`#2081 `_) +- Fix mistake about access log disabling. (`#2085 `_) - Add example usage of on_startup and on_shutdown signals by creating and - disposing an aiopg connection engine. 
(#2131) + disposing an aiopg connection engine. (`#2131 `_) - Document `encoded=True` for `yarl.URL`, it disables all yarl transformations. - (#2198) + (`#2198 `_) - Document that all app's middleware factories are run for every request. - (#2225) + (`#2225 `_) - Reflect the fact that default resolver is threaded one starting from aiohttp - 1.1 (#2228) + 1.1 (`#2228 `_) Deprecations and Removals ------------------------- -- Drop deprecated `Server.finish_connections` (#2006) +- Drop deprecated `Server.finish_connections` (`#2006 `_) - Drop %O format from logging, use %b instead. Drop %e format from logging, - environment variables are not supported anymore. (#2123) -- Drop deprecated secure_proxy_ssl_header support (#2171) + environment variables are not supported anymore. (`#2123 `_) +- Drop deprecated secure_proxy_ssl_header support (`#2171 `_) - Removed TimeService in favor of simple caching. TimeService also had a bug - where it lost about 0.5 seconds per second. (#2176) -- Drop unused response_factory from static files API (#2290) + where it lost about 0.5 seconds per second. (`#2176 `_) +- Drop unused response_factory from static files API (`#2290 `_) Misc @@ -425,13 +425,13 @@ Misc ================== - Don't raise deprecation warning on - `loop.run_until_complete(client.close())` (#2065) + `loop.run_until_complete(client.close())` (`#2065 `_) 2.2.4 (2017-08-02) ================== - Fix issue with synchronous session closing when using ClientSession - as an asynchronous context manager. (#2063) + as an asynchronous context manager. (`#2063 `_) 2.2.3 (2017-07-04) ================== @@ -449,51 +449,51 @@ Misc - Relax `yarl` requirement to 0.11+ -- Backport #2026: `session.close` *is* a coroutine (#2029) +- Backport #2026: `session.close` *is* a coroutine (`#2029 `_) 2.2.0 (2017-06-20) ================== -- Add doc for add_head, update doc for add_get. (#1944) +- Add doc for add_head, update doc for add_get. 
(`#1944 `_) - Fixed consecutive calls for `Response.write_eof`. - Retain method attributes (e.g. :code:`__doc__`) when registering synchronous - handlers for resources. (#1953) + handlers for resources. (`#1953 `_) -- Added signal TERM handling in `run_app` to gracefully exit (#1932) +- Added signal TERM handling in `run_app` to gracefully exit (`#1932 `_) -- Fix websocket issues caused by frame fragmentation. (#1962) +- Fix websocket issues caused by frame fragmentation. (`#1962 `_) - Raise RuntimeError is you try to set the Content Length and enable - chunked encoding at the same time (#1941) + chunked encoding at the same time (`#1941 `_) - Small update for `unittest_run_loop` -- Use CIMultiDict for ClientRequest.skip_auto_headers (#1970) +- Use CIMultiDict for ClientRequest.skip_auto_headers (`#1970 `_) - Fix wrong startup sequence: test server and `run_app()` are not raise - `DeprecationWarning` now (#1947) + `DeprecationWarning` now (`#1947 `_) -- Make sure cleanup signal is sent if startup signal has been sent (#1959) +- Make sure cleanup signal is sent if startup signal has been sent (`#1959 `_) -- Fixed server keep-alive handler, could cause 100% cpu utilization (#1955) +- Fixed server keep-alive handler, could cause 100% cpu utilization (`#1955 `_) - Connection can be destroyed before response get processed if - `await aiohttp.request(..)` is used (#1981) + `await aiohttp.request(..)` is used (`#1981 `_) -- MultipartReader does not work with -OO (#1969) +- MultipartReader does not work with -OO (`#1969 `_) -- Fixed `ClientPayloadError` with blank `Content-Encoding` header (#1931) +- Fixed `ClientPayloadError` with blank `Content-Encoding` header (`#1931 `_) -- Support `deflate` encoding implemented in `httpbin.org/deflate` (#1918) +- Support `deflate` encoding implemented in `httpbin.org/deflate` (`#1918 `_) -- Fix BadStatusLine caused by extra `CRLF` after `POST` data (#1792) +- Fix BadStatusLine caused by extra `CRLF` after `POST` data (`#1792 `_) -- 
Keep a reference to `ClientSession` in response object (#1985) +- Keep a reference to `ClientSession` in response object (`#1985 `_) -- Deprecate undocumented `app.on_loop_available` signal (#1978) +- Deprecate undocumented `app.on_loop_available` signal (`#1978 `_) @@ -504,54 +504,54 @@ Misc https://github.com/PyO3/tokio - Write to transport ``\r\n`` before closing after keepalive timeout, - otherwise client can not detect socket disconnection. (#1883) + otherwise client can not detect socket disconnection. (`#1883 `_) - Only call `loop.close` in `run_app` if the user did *not* supply a loop. Useful for allowing clients to specify their own cleanup before closing the asyncio loop if they wish to tightly control loop behavior -- Content disposition with semicolon in filename (#917) +- Content disposition with semicolon in filename (`#917 `_) -- Added `request_info` to response object and `ClientResponseError`. (#1733) +- Added `request_info` to response object and `ClientResponseError`. (`#1733 `_) -- Added `history` to `ClientResponseError`. (#1741) +- Added `history` to `ClientResponseError`. (`#1741 `_) -- Allow to disable redirect url re-quoting (#1474) +- Allow to disable redirect url re-quoting (`#1474 `_) -- Handle RuntimeError from transport (#1790) +- Handle RuntimeError from transport (`#1790 `_) -- Dropped "%O" in access logger (#1673) +- Dropped "%O" in access logger (`#1673 `_) - Added `args` and `kwargs` to `unittest_run_loop`. Useful with other - decorators, for example `@patch`. (#1803) + decorators, for example `@patch`. (`#1803 `_) -- Added `iter_chunks` to response.content object. (#1805) +- Added `iter_chunks` to response.content object. (`#1805 `_) - Avoid creating TimerContext when there is no timeout to allow - compatibility with Tornado. (#1817) (#1180) + compatibility with Tornado. (`#1817 `_) (`#1180 `_) - Add `proxy_from_env` to `ClientRequest` to read from environment - variables. (#1791) + variables. 
(`#1791 `_) -- Add DummyCookieJar helper. (#1830) +- Add DummyCookieJar helper. (`#1830 `_) -- Fix assertion errors in Python 3.4 from noop helper. (#1847) +- Fix assertion errors in Python 3.4 from noop helper. (`#1847 `_) -- Do not unquote `+` in match_info values (#1816) +- Do not unquote `+` in match_info values (`#1816 `_) - Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and - host resolution. (#1134) + host resolution. (`#1134 `_) -- Fix sub-application middlewares resolution order (#1853) +- Fix sub-application middlewares resolution order (`#1853 `_) -- Fix applications comparison (#1866) +- Fix applications comparison (`#1866 `_) -- Fix static location in index when prefix is used (#1662) +- Fix static location in index when prefix is used (`#1662 `_) -- Make test server more reliable (#1896) +- Make test server more reliable (`#1896 `_) - Extend list of web exceptions, add HTTPUnprocessableEntity, - HTTPFailedDependency, HTTPInsufficientStorage status codes (#1920) + HTTPFailedDependency, HTTPInsufficientStorage status codes (`#1920 `_) 2.0.7 (2017-04-12) @@ -559,57 +559,57 @@ Misc - Fix *pypi* distribution -- Fix exception description (#1807) +- Fix exception description (`#1807 `_) -- Handle socket error in FileResponse (#1773) +- Handle socket error in FileResponse (`#1773 `_) -- Cancel websocket heartbeat on close (#1793) +- Cancel websocket heartbeat on close (`#1793 `_) 2.0.6 (2017-04-04) ================== -- Keeping blank values for `request.post()` and `multipart.form()` (#1765) +- Keeping blank values for `request.post()` and `multipart.form()` (`#1765 `_) -- TypeError in data_received of ResponseHandler (#1770) +- TypeError in data_received of ResponseHandler (`#1770 `_) - Fix ``web.run_app`` not to bind to default host-port pair if only socket is - passed (#1786) + passed (`#1786 `_) 2.0.5 (2017-03-29) ================== -- Memory leak with aiohttp.request (#1756) +- Memory leak with aiohttp.request (`#1756 `_) - 
Disable cleanup closed ssl transports by default. - Exception in request handling if the server responds before the body - is sent (#1761) + is sent (`#1761 `_) 2.0.4 (2017-03-27) ================== -- Memory leak with aiohttp.request (#1756) +- Memory leak with aiohttp.request (`#1756 `_) -- Encoding is always UTF-8 in POST data (#1750) +- Encoding is always UTF-8 in POST data (`#1750 `_) -- Do not add "Content-Disposition" header by default (#1755) +- Do not add "Content-Disposition" header by default (`#1755 `_) 2.0.3 (2017-03-24) ================== -- Call https website through proxy will cause error (#1745) +- Call https website through proxy will cause error (`#1745 `_) -- Fix exception on multipart/form-data post if content-type is not set (#1743) +- Fix exception on multipart/form-data post if content-type is not set (`#1743 `_) 2.0.2 (2017-03-21) ================== -- Fixed Application.on_loop_available signal (#1739) +- Fixed Application.on_loop_available signal (`#1739 `_) - Remove debug code @@ -617,21 +617,21 @@ Misc 2.0.1 (2017-03-21) ================== -- Fix allow-head to include name on route (#1737) +- Fix allow-head to include name on route (`#1737 `_) -- Fixed AttributeError in WebSocketResponse.can_prepare (#1736) +- Fixed AttributeError in WebSocketResponse.can_prepare (`#1736 `_) 2.0.0 (2017-03-20) ================== -- Added `json` to `ClientSession.request()` method (#1726) +- Added `json` to `ClientSession.request()` method (`#1726 `_) - Added session's `raise_for_status` parameter, automatically calls - raise_for_status() on any request. (#1724) + raise_for_status() on any request. (`#1724 `_) - `response.json()` raises `ClientReponseError` exception if response's - content type does not match (#1723) + content type does not match (`#1723 `_) - Cleanup timer and loop handle on any client exception. 
@@ -641,25 +641,25 @@ Misc `2.0.0rc1` (2017-03-15) ======================= -- Properly handle payload errors (#1710) +- Properly handle payload errors (`#1710 `_) -- Added `ClientWebSocketResponse.get_extra_info()` (#1717) +- Added `ClientWebSocketResponse.get_extra_info()` (`#1717 `_) - It is not possible to combine Transfer-Encoding and chunked parameter, - same for compress and Content-Encoding (#1655) + same for compress and Content-Encoding (`#1655 `_) - Connector's `limit` parameter indicates total concurrent connections. - New `limit_per_host` added, indicates total connections per endpoint. (#1601) + New `limit_per_host` added, indicates total connections per endpoint. (`#1601 `_) -- Use url's `raw_host` for name resolution (#1685) +- Use url's `raw_host` for name resolution (`#1685 `_) -- Change `ClientResponse.url` to `yarl.URL` instance (#1654) +- Change `ClientResponse.url` to `yarl.URL` instance (`#1654 `_) -- Add max_size parameter to web.Request reading methods (#1133) +- Add max_size parameter to web.Request reading methods (`#1133 `_) -- Web Request.post() stores data in temp files (#1469) +- Web Request.post() stores data in temp files (`#1469 `_) -- Add the `allow_head=True` keyword argument for `add_get` (#1618) +- Add the `allow_head=True` keyword argument for `add_get` (`#1618 `_) - `run_app` and the Command Line Interface now support serving over Unix domain sockets for faster inter-process communication. @@ -668,54 +668,54 @@ Misc e.g. for socket-based activated applications, when binding of a socket is done by the parent process. 
-- Implementation for Trailer headers parser is broken (#1619) +- Implementation for Trailer headers parser is broken (`#1619 `_) - Fix FileResponse to not fall on bad request (range out of file size) - Fix FileResponse to correct stream video to Chromes -- Deprecate public low-level api (#1657) +- Deprecate public low-level api (`#1657 `_) - Deprecate `encoding` parameter for ClientSession.request() method -- Dropped aiohttp.wsgi (#1108) +- Dropped aiohttp.wsgi (`#1108 `_) - Dropped `version` from ClientSession.request() method -- Dropped websocket version 76 support (#1160) +- Dropped websocket version 76 support (`#1160 `_) -- Dropped: `aiohttp.protocol.HttpPrefixParser` (#1590) +- Dropped: `aiohttp.protocol.HttpPrefixParser` (`#1590 `_) - Dropped: Servers response's `.started`, `.start()` and - `.can_start()` method (#1591) + `.can_start()` method (`#1591 `_) - Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead (#1592) + use `app.add_subapp()` instead (`#1592 `_) -- Dropped: `Application.finish()` and `Application.register_on_finish()` (#1602) +- Dropped: `Application.finish()` and `Application.register_on_finish()` (`#1602 `_) - Dropped: `web.Request.GET` and `web.Request.POST` - Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and - aiohttp.ws_connect() (#1593) + aiohttp.ws_connect() (`#1593 `_) -- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (#1605) +- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (`#1605 `_) - Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and - `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (#1606) + `keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (`#1606 `_) - Dropped: `TCPConnector's`` `.resolve`, `.resolved_hosts`, `.clear_resolved_hosts()` attributes and `resolve` constructor - parameter (#1607) + parameter (`#1607 `_) -- Dropped 
`ProxyConnector` (#1609) +- Dropped `ProxyConnector` (`#1609 `_) 1.3.5 (2017-03-16) ================== -- Fixed None timeout support (#1720) +- Fixed None timeout support (`#1720 `_) 1.3.4 (2017-03-14) @@ -729,30 +729,30 @@ Misc - Fix file_sender to correct stream video to Chromes -- Fix NotImplementedError server exception (#1703) +- Fix NotImplementedError server exception (`#1703 `_) -- Clearer error message for URL without a host name. (#1691) +- Clearer error message for URL without a host name. (`#1691 `_) -- Silence deprecation warning in __repr__ (#1690) +- Silence deprecation warning in __repr__ (`#1690 `_) -- IDN + HTTPS = `ssl.CertificateError` (#1685) +- IDN + HTTPS = `ssl.CertificateError` (`#1685 `_) 1.3.3 (2017-02-19) ================== -- Fixed memory leak in time service (#1656) +- Fixed memory leak in time service (`#1656 `_) 1.3.2 (2017-02-16) ================== -- Awaiting on WebSocketResponse.send_* does not work (#1645) +- Awaiting on WebSocketResponse.send_* does not work (`#1645 `_) - Fix multiple calls to client ws_connect when using a shared header - dict (#1643) + dict (`#1643 `_) -- Make CookieJar.filter_cookies() accept plain string parameter. (#1636) +- Make CookieJar.filter_cookies() accept plain string parameter. 
(`#1636 `_) 1.3.1 (2017-02-09) @@ -760,65 +760,65 @@ Misc - Handle CLOSING in WebSocketResponse.__anext__ -- Fixed AttributeError 'drain' for server websocket handler (#1613) +- Fixed AttributeError 'drain' for server websocket handler (`#1613 `_) 1.3.0 (2017-02-08) ================== - Multipart writer validates the data on append instead of on a - request send (#920) + request send (`#920 `_) - Multipart reader accepts multipart messages with or without their epilogue - to consistently handle valid and legacy behaviors (#1526) (#1581) + to consistently handle valid and legacy behaviors (`#1526 `_) (`#1581 `_) - Separate read + connect + request timeouts # 1523 -- Do not swallow Upgrade header (#1587) +- Do not swallow Upgrade header (`#1587 `_) -- Fix polls demo run application (#1487) +- Fix polls demo run application (`#1487 `_) -- Ignore unknown 1XX status codes in client (#1353) +- Ignore unknown 1XX status codes in client (`#1353 `_) -- Fix sub-Multipart messages missing their headers on serialization (#1525) +- Fix sub-Multipart messages missing their headers on serialization (`#1525 `_) - Do not use readline when reading the content of a part - in the multipart reader (#1535) + in the multipart reader (`#1535 `_) -- Add optional flag for quoting `FormData` fields (#916) +- Add optional flag for quoting `FormData` fields (`#916 `_) -- 416 Range Not Satisfiable if requested range end > file size (#1588) +- 416 Range Not Satisfiable if requested range end > file size (`#1588 `_) -- Having a `:` or `@` in a route does not work (#1552) +- Having a `:` or `@` in a route does not work (`#1552 `_) - Added `receive_timeout` timeout for websocket to receive complete - message. (#1325) + message. (`#1325 `_) - Added `heartbeat` parameter for websocket to automatically send - `ping` message. (#1024) (#777) + `ping` message. 
(`#1024 `_) (`#777 `_) -- Remove `web.Application` dependency from `web.UrlDispatcher` (#1510) +- Remove `web.Application` dependency from `web.UrlDispatcher` (`#1510 `_) -- Accepting back-pressure from slow websocket clients (#1367) +- Accepting back-pressure from slow websocket clients (`#1367 `_) -- Do not pause transport during set_parser stage (#1211) +- Do not pause transport during set_parser stage (`#1211 `_) -- Lingering close does not terminate before timeout (#1559) +- Lingering close does not terminate before timeout (`#1559 `_) -- `setsockopt` may raise `OSError` exception if socket is closed already (#1595) +- `setsockopt` may raise `OSError` exception if socket is closed already (`#1595 `_) -- Lots of CancelledError when requests are interrupted (#1565) +- Lots of CancelledError when requests are interrupted (`#1565 `_) - Allow users to specify what should happen to decoding errors - when calling a responses `text()` method (#1542) + when calling a responses `text()` method (`#1542 `_) -- Back port std module `http.cookies` for python3.4.2 (#1566) +- Back port std module `http.cookies` for python3.4.2 (`#1566 `_) -- Maintain url's fragment in client response (#1314) +- Maintain url's fragment in client response (`#1314 `_) -- Allow concurrently close WebSocket connection (#754) +- Allow concurrently close WebSocket connection (`#754 `_) -- Gzipped responses with empty body raises ContentEncodingError (#609) +- Gzipped responses with empty body raises ContentEncodingError (`#609 `_) - Return 504 if request handle raises TimeoutError. @@ -828,25 +828,25 @@ Misc message during client response release - Abort closed ssl client transports, broken servers can keep socket - open un-limit time (#1568) + open un-limit time (`#1568 `_) - Log warning instead of `RuntimeError` is websocket connection is closed. 
- Deprecated: `aiohttp.protocol.HttpPrefixParser` - will be removed in 1.4 (#1590) + will be removed in 1.4 (`#1590 `_) - Deprecated: Servers response's `.started`, `.start()` and - `.can_start()` method will be removed in 1.4 (#1591) + `.can_start()` method will be removed in 1.4 (`#1591 `_) - Deprecated: Adding `sub app` via `app.router.add_subapp()` is deprecated - use `app.add_subapp()` instead, will be removed in 1.4 (#1592) + use `app.add_subapp()` instead, will be removed in 1.4 (`#1592 `_) - Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect() - will be removed in 1.4 (#1593) + will be removed in 1.4 (`#1593 `_) - Deprecated: `Application.finish()` and `Application.register_on_finish()` - will be removed in 1.4 (#1602) + will be removed in 1.4 (`#1602 `_) 1.2.0 (2016-12-17) @@ -854,13 +854,13 @@ Misc - Extract `BaseRequest` from `web.Request`, introduce `web.Server` (former `RequestHandlerFactory`), introduce new low-level web server - which is not coupled with `web.Application` and routing (#1362) + which is not coupled with `web.Application` and routing (`#1362 `_) -- Make `TestServer.make_url` compatible with `yarl.URL` (#1389) +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 `_) -- Implement range requests for static files (#1382) +- Implement range requests for static files (`#1382 `_) -- Support task attribute for StreamResponse (#1410) +- Support task attribute for StreamResponse (`#1410 `_) - Drop `TestClient.app` property, use `TestClient.server.app` instead (BACKWARD INCOMPATIBLE) @@ -871,84 +871,84 @@ Misc - `TestClient.server` property returns a test server instance, was `asyncio.AbstractServer` (BACKWARD INCOMPATIBLE) -- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (#1201) +- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (`#1201 `_) - Call worker_int and worker_abort callbacks in - 
`Gunicorn[UVLoop]WebWorker` (#1202) + `Gunicorn[UVLoop]WebWorker` (`#1202 `_) -- Has functional tests for client proxy (#1218) +- Has functional tests for client proxy (`#1218 `_) -- Fix bugs with client proxy target path and proxy host with port (#1413) +- Fix bugs with client proxy target path and proxy host with port (`#1413 `_) -- Fix bugs related to the use of unicode hostnames (#1444) +- Fix bugs related to the use of unicode hostnames (`#1444 `_) -- Preserve cookie quoting/escaping (#1453) +- Preserve cookie quoting/escaping (`#1453 `_) -- FileSender will send gzipped response if gzip version available (#1426) +- FileSender will send gzipped response if gzip version available (`#1426 `_) - Don't override `Content-Length` header in `web.Response` if no body - was set (#1400) + was set (`#1400 `_) -- Introduce `router.post_init()` for solving (#1373) +- Introduce `router.post_init()` for solving (`#1373 `_) - Fix raise error in case of multiple calls of `TimeServive.stop()` -- Allow to raise web exceptions on router resolving stage (#1460) +- Allow to raise web exceptions on router resolving stage (`#1460 `_) -- Add a warning for session creation outside of coroutine (#1468) +- Add a warning for session creation outside of coroutine (`#1468 `_) - Avoid a race when application might start accepting incoming requests but startup signals are not processed yet e98e8c6 - Raise a `RuntimeError` when trying to change the status of the HTTP response - after the headers have been sent (#1480) + after the headers have been sent (`#1480 `_) -- Fix bug with https proxy acquired cleanup (#1340) +- Fix bug with https proxy acquired cleanup (`#1340 `_) -- Use UTF-8 as the default encoding for multipart text parts (#1484) +- Use UTF-8 as the default encoding for multipart text parts (`#1484 `_) 1.1.6 (2016-11-28) ================== - Fix `BodyPartReader.read_chunk` bug about returns zero bytes before - `EOF` (#1428) + `EOF` (`#1428 `_) 1.1.5 (2016-11-16) ================== -- 
Fix static file serving in fallback mode (#1401) +- Fix static file serving in fallback mode (`#1401 `_) 1.1.4 (2016-11-14) ================== -- Make `TestServer.make_url` compatible with `yarl.URL` (#1389) +- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 `_) - Generate informative exception on redirects from server which - does not provide redirection headers (#1396) + does not provide redirection headers (`#1396 `_) 1.1.3 (2016-11-10) ================== -- Support *root* resources for sub-applications (#1379) +- Support *root* resources for sub-applications (`#1379 `_) 1.1.2 (2016-11-08) ================== -- Allow starting variables with an underscore (#1379) +- Allow starting variables with an underscore (`#1379 `_) -- Properly process UNIX sockets by gunicorn worker (#1375) +- Properly process UNIX sockets by gunicorn worker (`#1375 `_) - Fix ordering for `FrozenList` -- Don't propagate pre and post signals to sub-application (#1377) +- Don't propagate pre and post signals to sub-application (`#1377 `_) 1.1.1 (2016-11-04) ================== -- Fix documentation generation (#1120) +- Fix documentation generation (`#1120 `_) 1.1.0 (2016-11-03) ================== @@ -956,23 +956,23 @@ Misc - Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE) - Use `yarl.URL` in client API. The change is 99% backward compatible - but `ClientResponse.url` is an `yarl.URL` instance now. (#1217) + but `ClientResponse.url` is an `yarl.URL` instance now. (`#1217 `_) -- Close idle keep-alive connections on shutdown (#1222) +- Close idle keep-alive connections on shutdown (`#1222 `_) -- Modify regex in AccessLogger to accept underscore and numbers (#1225) +- Modify regex in AccessLogger to accept underscore and numbers (`#1225 `_) - Use `yarl.URL` in web server API. `web.Request.rel_url` and `web.Request.url` are added. URLs and templates are percent-encoded - now. (#1224) + now. 
(`#1224 `_) -- Accept `yarl.URL` by server redirections (#1278) +- Accept `yarl.URL` by server redirections (`#1278 `_) -- Return `yarl.URL` by `.make_url()` testing utility (#1279) +- Return `yarl.URL` by `.make_url()` testing utility (`#1279 `_) -- Properly format IPv6 addresses by `aiohttp.web.run_app` (#1139) +- Properly format IPv6 addresses by `aiohttp.web.run_app` (`#1139 `_) -- Use `yarl.URL` by server API (#1288) +- Use `yarl.URL` by server API (`#1288 `_) * Introduce `resource.url_for()`, deprecate `resource.url()`. @@ -983,38 +983,38 @@ Misc * Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`, `StaticRoute`, `ResourceAdapter`. -- Revert `resp.url` back to `str`, introduce `resp.url_obj` (#1292) +- Revert `resp.url` back to `str`, introduce `resp.url_obj` (`#1292 `_) -- Raise ValueError if BasicAuth login has a ":" character (#1307) +- Raise ValueError if BasicAuth login has a ":" character (`#1307 `_) - Fix bug when ClientRequest send payload file with opened as - open('filename', 'r+b') (#1306) + open('filename', 'r+b') (`#1306 `_) -- Enhancement to AccessLogger (pass *extra* dict) (#1303) +- Enhancement to AccessLogger (pass *extra* dict) (`#1303 `_) -- Show more verbose message on import errors (#1319) +- Show more verbose message on import errors (`#1319 `_) -- Added save and load functionality for `CookieJar` (#1219) +- Added save and load functionality for `CookieJar` (`#1219 `_) -- Added option on `StaticRoute` to follow symlinks (#1299) +- Added option on `StaticRoute` to follow symlinks (`#1299 `_) -- Force encoding of `application/json` content type to utf-8 (#1339) +- Force encoding of `application/json` content type to utf-8 (`#1339 `_) -- Fix invalid invocations of `errors.LineTooLong` (#1335) +- Fix invalid invocations of `errors.LineTooLong` (`#1335 `_) -- Websockets: Stop `async for` iteration when connection is closed (#1144) +- Websockets: Stop `async for` iteration when connection is closed (`#1144 `_) -- Ensure TestClient 
HTTP methods return a context manager (#1318) +- Ensure TestClient HTTP methods return a context manager (`#1318 `_) - Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function - if `ClientSession` object is closed by client when reading data. (#1323) + if `ClientSession` object is closed by client when reading data. (`#1323 `_) -- Document deployment without `Gunicorn` (#1120) +- Document deployment without `Gunicorn` (`#1120 `_) - Add deprecation warning for MD5 and SHA1 digests when used for fingerprint - of site certs in TCPConnector. (#1186) + of site certs in TCPConnector. (`#1186 `_) -- Implement sub-applications (#1301) +- Implement sub-applications (`#1301 `_) - Don't inherit `web.Request` from `dict` but implement `MutableMapping` protocol. @@ -1039,55 +1039,55 @@ Misc boost of your application -- a couple DB requests and business logic is still the main bottleneck. -- Boost performance by adding a custom time service (#1350) +- Boost performance by adding a custom time service (`#1350 `_) - Extend `ClientResponse` with `content_type` and `charset` - properties like in `web.Request`. (#1349) + properties like in `web.Request`. (`#1349 `_) -- Disable aiodns by default (#559) +- Disable aiodns by default (`#559 `_) - Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default. 
-- Implement `web.Request.clone()` (#1361) +- Implement `web.Request.clone()` (`#1361 `_) 1.0.5 (2016-10-11) ================== - Fix StreamReader._read_nowait to return all available - data up to the requested amount (#1297) + data up to the requested amount (`#1297 `_) 1.0.4 (2016-09-22) ================== - Fix FlowControlStreamReader.read_nowait so that it checks - whether the transport is paused (#1206) + whether the transport is paused (`#1206 `_) 1.0.2 (2016-09-22) ================== -- Make CookieJar compatible with 32-bit systems (#1188) +- Make CookieJar compatible with 32-bit systems (`#1188 `_) -- Add missing `WSMsgType` to `web_ws.__all__`, see (#1200) +- Add missing `WSMsgType` to `web_ws.__all__`, see (`#1200 `_) -- Fix `CookieJar` ctor when called with `loop=None` (#1203) +- Fix `CookieJar` ctor when called with `loop=None` (`#1203 `_) -- Fix broken upper-casing in wsgi support (#1197) +- Fix broken upper-casing in wsgi support (`#1197 `_) 1.0.1 (2016-09-16) ================== - Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake - of backward compatibility (#1178) + of backward compatibility (`#1178 `_) - Tune alabaster schema. - Use `text/html` content type for displaying index pages by static file handler. -- Fix `AssertionError` in static file handling (#1177) +- Fix `AssertionError` in static file handling (`#1177 `_) - Fix access log formats `%O` and `%b` for static file handling @@ -1099,9 +1099,9 @@ Misc ================== - Change default size for client session's connection pool from - unlimited to 20 (#977) + unlimited to 20 (`#977 `_) -- Add IE support for cookie deletion. (#994) +- Add IE support for cookie deletion. 
(`#994 `_) - Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD INCOMPATIBLE) @@ -1110,26 +1110,26 @@ Misc method (BACKWARD INCOMPATIBLE) - Avoid using of mutable CIMultiDict kw param in make_mocked_request - (#997) + (`#997 `_) - Make WebSocketResponse.close a little bit faster by avoiding new task creating just for timeout measurement - Add `proxy` and `proxy_auth` params to `client.get()` and family, - deprecate `ProxyConnector` (#998) + deprecate `ProxyConnector` (`#998 `_) - Add support for websocket send_json and receive_json, synchronize - server and client API for websockets (#984) + server and client API for websockets (`#984 `_) - Implement router shourtcuts for most useful HTTP methods, use `app.router.add_get()`, `app.router.add_post()` etc. instead of - `app.router.add_route()` (#986) + `app.router.add_route()` (`#986 `_) -- Support SSL connections for gunicorn worker (#1003) +- Support SSL connections for gunicorn worker (`#1003 `_) - Move obsolete examples to legacy folder -- Switch to multidict 2.0 and title-cased strings (#1015) +- Switch to multidict 2.0 and title-cased strings (`#1015 `_) - `{FOO}e` logger format is case-sensitive now @@ -1145,9 +1145,9 @@ Misc - Remove deprecated decode param from resp.read(decode=True) -- Use 5min default client timeout (#1028) +- Use 5min default client timeout (`#1028 `_) -- Relax HTTP method validation in UrlDispatcher (#1037) +- Relax HTTP method validation in UrlDispatcher (`#1037 `_) - Pin minimal supported asyncio version to 3.4.2+ (`loop.is_close()` should be present) @@ -1157,84 +1157,84 @@ Misc - Link header for 451 status code is mandatory -- Fix test_client fixture to allow multiple clients per test (#1072) +- Fix test_client fixture to allow multiple clients per test (`#1072 `_) -- make_mocked_request now accepts dict as headers (#1073) +- make_mocked_request now accepts dict as headers (`#1073 `_) - Add Python 3.5.2/3.6+ compatibility patch for async generator - protocol change 
(#1082) + protocol change (`#1082 `_) -- Improvement test_client can accept instance object (#1083) +- Improvement test_client can accept instance object (`#1083 `_) -- Simplify ServerHttpProtocol implementation (#1060) +- Simplify ServerHttpProtocol implementation (`#1060 `_) - Add a flag for optional showing directory index for static file - handling (#921) + handling (`#921 `_) -- Define `web.Application.on_startup()` signal handler (#1103) +- Define `web.Application.on_startup()` signal handler (`#1103 `_) -- Drop ChunkedParser and LinesParser (#1111) +- Drop ChunkedParser and LinesParser (`#1111 `_) -- Call `Application.startup` in GunicornWebWorker (#1105) +- Call `Application.startup` in GunicornWebWorker (`#1105 `_) - Fix client handling hostnames with 63 bytes when a port is given in - the url (#1044) + the url (`#1044 `_) -- Implement proxy support for ClientSession.ws_connect (#1025) +- Implement proxy support for ClientSession.ws_connect (`#1025 `_) -- Return named tuple from WebSocketResponse.can_prepare (#1016) +- Return named tuple from WebSocketResponse.can_prepare (`#1016 `_) -- Fix access_log_format in `GunicornWebWorker` (#1117) +- Fix access_log_format in `GunicornWebWorker` (`#1117 `_) -- Setup Content-Type to application/octet-stream by default (#1124) +- Setup Content-Type to application/octet-stream by default (`#1124 `_) - Deprecate debug parameter from app.make_handler(), use - `Application(debug=True)` instead (#1121) + `Application(debug=True)` instead (`#1121 `_) -- Remove fragment string in request path (#846) +- Remove fragment string in request path (`#846 `_) -- Use aiodns.DNSResolver.gethostbyname() if available (#1136) +- Use aiodns.DNSResolver.gethostbyname() if available (`#1136 `_) -- Fix static file sending on uvloop when sendfile is available (#1093) +- Fix static file sending on uvloop when sendfile is available (`#1093 `_) -- Make prettier urls if query is empty dict (#1143) +- Make prettier urls if query is empty dict 
(`#1143 `_) -- Fix redirects for HEAD requests (#1147) +- Fix redirects for HEAD requests (`#1147 `_) -- Default value for `StreamReader.read_nowait` is -1 from now (#1150) +- Default value for `StreamReader.read_nowait` is -1 from now (`#1150 `_) - `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now - (BACKWARD INCOMPATIBLE) (#1150) + (BACKWARD INCOMPATIBLE) (`#1150 `_) -- Streams documentation added (#1150) +- Streams documentation added (`#1150 `_) -- Add `multipart` coroutine method for web Request object (#1067) +- Add `multipart` coroutine method for web Request object (`#1067 `_) -- Publish ClientSession.loop property (#1149) +- Publish ClientSession.loop property (`#1149 `_) -- Fix static file with spaces (#1140) +- Fix static file with spaces (`#1140 `_) -- Fix piling up asyncio loop by cookie expiration callbacks (#1061) +- Fix piling up asyncio loop by cookie expiration callbacks (`#1061 `_) - Drop `Timeout` class for sake of `async_timeout` external library. 
`aiohttp.Timeout` is an alias for `async_timeout.timeout` - `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by - default (BACKWARD INCOMPATIBLE) (#1152) + default (BACKWARD INCOMPATIBLE) (`#1152 `_) - `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by - default (BACKWARD INCOMPATIBLE) (#1152) + default (BACKWARD INCOMPATIBLE) (`#1152 `_) -- Conform to RFC3986 - do not include url fragments in client requests (#1174) +- Conform to RFC3986 - do not include url fragments in client requests (`#1174 `_) -- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (#1173) +- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (`#1173 `_) -- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (#1173) +- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (`#1173 `_) - Fix clashing cookies with have the same name but belong to different - domains (BACKWARD INCOMPATIBLE) (#1125) + domains (BACKWARD INCOMPATIBLE) (`#1125 `_) -- Support binary Content-Transfer-Encoding (#1169) +- Support binary Content-Transfer-Encoding (`#1169 `_) 0.22.5 (08-02-2016) @@ -1245,17 +1245,17 @@ Misc 0.22.3 (07-26-2016) =================== -- Do not filter cookies if unsafe flag provided (#1005) +- Do not filter cookies if unsafe flag provided (`#1005 `_) 0.22.2 (07-23-2016) =================== -- Suppress CancelledError when Timeout raises TimeoutError (#970) +- Suppress CancelledError when Timeout raises TimeoutError (`#970 `_) - Don't expose `aiohttp.__version__` -- Add unsafe parameter to CookieJar (#968) +- Add unsafe parameter to CookieJar (`#968 `_) - Use unsafe cookie jar in test client tools @@ -1266,88 +1266,88 @@ Misc =================== - Large cookie expiration/max-age does not break an event loop from now - (fixes (#967)) + (fixes (`#967 `_)) 0.22.0 (07-15-2016) =================== -- Fix bug in serving static directory (#803) +- Fix bug in serving static directory (`#803 `_) -- Fix command line arg parsing (#797) +- Fix command 
line arg parsing (`#797 `_) -- Fix a documentation chapter about cookie usage (#790) +- Fix a documentation chapter about cookie usage (`#790 `_) -- Handle empty body with gzipped encoding (#758) +- Handle empty body with gzipped encoding (`#758 `_) -- Support 451 Unavailable For Legal Reasons http status (#697) +- Support 451 Unavailable For Legal Reasons http status (`#697 `_) -- Fix Cookie share example and few small typos in docs (#817) +- Fix Cookie share example and few small typos in docs (`#817 `_) -- UrlDispatcher.add_route with partial coroutine handler (#814) +- UrlDispatcher.add_route with partial coroutine handler (`#814 `_) -- Optional support for aiodns (#728) +- Optional support for aiodns (`#728 `_) -- Add ServiceRestart and TryAgainLater websocket close codes (#828) +- Add ServiceRestart and TryAgainLater websocket close codes (`#828 `_) -- Fix prompt message for `web.run_app` (#832) +- Fix prompt message for `web.run_app` (`#832 `_) -- Allow to pass None as a timeout value to disable timeout logic (#834) +- Allow to pass None as a timeout value to disable timeout logic (`#834 `_) -- Fix leak of connection slot during connection error (#835) +- Fix leak of connection slot during connection error (`#835 `_) - Gunicorn worker with uvloop support - `aiohttp.worker.GunicornUVLoopWebWorker` (#878) + `aiohttp.worker.GunicornUVLoopWebWorker` (`#878 `_) -- Don't send body in response to HEAD request (#838) +- Don't send body in response to HEAD request (`#838 `_) -- Skip the preamble in MultipartReader (#881) +- Skip the preamble in MultipartReader (`#881 `_) -- Implement BasicAuth decode classmethod. (#744) +- Implement BasicAuth decode classmethod. 
(`#744 `_) -- Don't crash logger when transport is None (#889) +- Don't crash logger when transport is None (`#889 `_) - Use a create_future compatibility wrapper instead of creating - Futures directly (#896) + Futures directly (`#896 `_) -- Add test utilities to aiohttp (#902) +- Add test utilities to aiohttp (`#902 `_) -- Improve Request.__repr__ (#875) +- Improve Request.__repr__ (`#875 `_) -- Skip DNS resolving if provided host is already an ip address (#874) +- Skip DNS resolving if provided host is already an ip address (`#874 `_) -- Add headers to ClientSession.ws_connect (#785) +- Add headers to ClientSession.ws_connect (`#785 `_) -- Document that server can send pre-compressed data (#906) +- Document that server can send pre-compressed data (`#906 `_) -- Don't add Content-Encoding and Transfer-Encoding if no body (#891) +- Don't add Content-Encoding and Transfer-Encoding if no body (`#891 `_) -- Add json() convenience methods to websocket message objects (#897) +- Add json() convenience methods to websocket message objects (`#897 `_) -- Add client_resp.raise_for_status() (#908) +- Add client_resp.raise_for_status() (`#908 `_) -- Implement cookie filter (#799) +- Implement cookie filter (`#799 `_) -- Include an example of middleware to handle error pages (#909) +- Include an example of middleware to handle error pages (`#909 `_) -- Fix error handling in StaticFileMixin (#856) +- Fix error handling in StaticFileMixin (`#856 `_) -- Add mocked request helper (#900) +- Add mocked request helper (`#900 `_) -- Fix empty ALLOW Response header for cls based View (#929) +- Fix empty ALLOW Response header for cls based View (`#929 `_) -- Respect CONNECT method to implement a proxy server (#847) +- Respect CONNECT method to implement a proxy server (`#847 `_) -- Add pytest_plugin (#914) +- Add pytest_plugin (`#914 `_) - Add tutorial - Add backlog option to support more than 128 (default value in - "create_server" function) concurrent connections (#892) + 
"create_server" function) concurrent connections (`#892 `_) -- Allow configuration of header size limits (#912) +- Allow configuration of header size limits (`#912 `_) -- Separate sending file logic from StaticRoute dispatcher (#901) +- Separate sending file logic from StaticRoute dispatcher (`#901 `_) - Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE) @@ -1360,28 +1360,28 @@ Misc - Drop all mentions about api changes in documentation for versions older than 0.16 -- Allow to override default cookie jar (#963) +- Allow to override default cookie jar (`#963 `_) - Add manylinux wheel builds -- Dup a socket for sendfile usage (#964) +- Dup a socket for sendfile usage (`#964 `_) 0.21.6 (05-05-2016) =================== -- Drop initial query parameters on redirects (#853) +- Drop initial query parameters on redirects (`#853 `_) 0.21.5 (03-22-2016) =================== -- Fix command line arg parsing (#797) +- Fix command line arg parsing (`#797 `_) 0.21.4 (03-12-2016) =================== - Fix ResourceAdapter: don't add method to allowed if resource is not - match (#826) + match (`#826 `_) - Fix Resource: append found method to returned allowed methods @@ -1389,12 +1389,12 @@ Misc =================== - Fix a regression: support for handling ~/path in static file routes was - broken (#782) + broken (`#782 `_) 0.21.1 (02-10-2016) =================== -- Make new resources classes public (#767) +- Make new resources classes public (`#767 `_) - Add `router.resources()` view @@ -1403,22 +1403,22 @@ Misc 0.21.0 (02-04-2016) =================== -- Introduce on_shutdown signal (#722) +- Introduce on_shutdown signal (`#722 `_) -- Implement raw input headers (#726) +- Implement raw input headers (`#726 `_) -- Implement web.run_app utility function (#734) +- Implement web.run_app utility function (`#734 `_) - Introduce on_cleanup signal - Deprecate Application.finish() / Application.register_on_finish() in favor of on_cleanup. 
-- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (#729) +- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (`#729 `_) -- Deprecate bare aiohttp.request(), aiohttp.get() and family (#729) +- Deprecate bare aiohttp.request(), aiohttp.get() and family (`#729 `_) -- Refactor keep-alive support (#737): +- Refactor keep-alive support (`#737 `_): - Enable keepalive for HTTP 1.0 by default @@ -1437,18 +1437,18 @@ Misc - don't send `Connection` header for HTTP 1.0 - Add version parameter to ClientSession constructor, - deprecate it for session.request() and family (#736) + deprecate it for session.request() and family (`#736 `_) -- Enable access log by default (#735) +- Enable access log by default (`#735 `_) - Deprecate app.router.register_route() (the method was not documented intentionally BTW). - Deprecate app.router.named_routes() in favor of app.router.named_resources() -- route.add_static accepts pathlib.Path now (#743) +- route.add_static accepts pathlib.Path now (`#743 `_) -- Add command line support: `$ python -m aiohttp.web package.main` (#740) +- Add command line support: `$ python -m aiohttp.web package.main` (`#740 `_) - FAQ section was added to docs. 
Enjoy and fill free to contribute new topics @@ -1456,32 +1456,32 @@ Misc - Document ClientResponse's host, method, url properties -- Use CORK/NODELAY in client API (#748) +- Use CORK/NODELAY in client API (`#748 `_) - ClientSession.close and Connector.close are coroutines now - Close client connection on exception in ClientResponse.release() -- Allow to read multipart parts without content-length specified (#750) +- Allow to read multipart parts without content-length specified (`#750 `_) -- Add support for unix domain sockets to gunicorn worker (#470) +- Add support for unix domain sockets to gunicorn worker (`#470 `_) -- Add test for default Expect handler (#601) +- Add test for default Expect handler (`#601 `_) - Add the first demo project -- Rename `loader` keyword argument in `web.Request.json` method. (#646) +- Rename `loader` keyword argument in `web.Request.json` method. (`#646 `_) -- Add local socket binding for TCPConnector (#678) +- Add local socket binding for TCPConnector (`#678 `_) 0.20.2 (01-07-2016) =================== -- Enable use of `await` for a class based view (#717) +- Enable use of `await` for a class based view (`#717 `_) -- Check address family to fill wsgi env properly (#718) +- Check address family to fill wsgi env properly (`#718 `_) -- Fix memory leak in headers processing (thanks to Marco Paolini) (#723) +- Fix memory leak in headers processing (thanks to Marco Paolini) (`#723 `_) 0.20.1 (12-30-2015) =================== @@ -1489,7 +1489,7 @@ Misc - Raise RuntimeError is Timeout context manager was used outside of task context. 
-- Add number of bytes to stream.read_nowait (#700) +- Add number of bytes to stream.read_nowait (`#700 `_) - Use X-FORWARDED-PROTO for wsgi.url_scheme when available @@ -1500,19 +1500,19 @@ Misc - Extend list of web exceptions, add HTTPMisdirectedRequest, HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests, HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates, - HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (#644) + HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (`#644 `_) -- Do not remove AUTHORIZATION header by WSGI handler (#649) +- Do not remove AUTHORIZATION header by WSGI handler (`#649 `_) -- Fix broken support for https proxies with authentication (#617) +- Fix broken support for https proxies with authentication (`#617 `_) - Get REMOTE_* and SEVER_* http vars from headers when listening on - unix socket (#654) + unix socket (`#654 `_) -- Add HTTP 308 support (#663) +- Add HTTP 308 support (`#663 `_) - Add Tf format (time to serve request in seconds, %06f format) to - access log (#669) + access log (`#669 `_) - Remove one and a half years long deprecated ClientResponse.read_and_close() method @@ -1521,77 +1521,77 @@ Misc on sending chunked encoded data - Use TCP_CORK and TCP_NODELAY to optimize network latency and - throughput (#680) + throughput (`#680 `_) -- Websocket XOR performance improved (#687) +- Websocket XOR performance improved (`#687 `_) -- Avoid sending cookie attributes in Cookie header (#613) +- Avoid sending cookie attributes in Cookie header (`#613 `_) - Round server timeouts to seconds for grouping pending calls. That - leads to less amount of poller syscalls e.g. epoll.poll(). (#702) + leads to less amount of poller syscalls e.g. epoll.poll(). 
(`#702 `_) -- Close connection on websocket handshake error (#703) +- Close connection on websocket handshake error (`#703 `_) -- Implement class based views (#684) +- Implement class based views (`#684 `_) -- Add *headers* parameter to ws_connect() (#709) +- Add *headers* parameter to ws_connect() (`#709 `_) -- Drop unused function `parse_remote_addr()` (#708) +- Drop unused function `parse_remote_addr()` (`#708 `_) -- Close session on exception (#707) +- Close session on exception (`#707 `_) -- Store http code and headers in WSServerHandshakeError (#706) +- Store http code and headers in WSServerHandshakeError (`#706 `_) -- Make some low-level message properties readonly (#710) +- Make some low-level message properties readonly (`#710 `_) 0.19.0 (11-25-2015) =================== -- Memory leak in ParserBuffer (#579) +- Memory leak in ParserBuffer (`#579 `_) - Support gunicorn's `max_requests` settings in gunicorn worker -- Fix wsgi environment building (#573) +- Fix wsgi environment building (`#573 `_) -- Improve access logging (#572) +- Improve access logging (`#572 `_) -- Drop unused host and port from low-level server (#586) +- Drop unused host and port from low-level server (`#586 `_) -- Add Python 3.5 `async for` implementation to server websocket (#543) +- Add Python 3.5 `async for` implementation to server websocket (`#543 `_) - Add Python 3.5 `async for` implementation to client websocket - Add Python 3.5 `async with` implementation to client websocket -- Add charset parameter to web.Response constructor (#593) +- Add charset parameter to web.Response constructor (`#593 `_) - Forbid passing both Content-Type header and content_type or charset params into web.Response constructor -- Forbid duplicating of web.Application and web.Request (#602) +- Forbid duplicating of web.Application and web.Request (`#602 `_) -- Add an option to pass Origin header in ws_connect (#607) +- Add an option to pass Origin header in ws_connect (`#607 `_) -- Add json_response 
function (#592) +- Add json_response function (`#592 `_) -- Make concurrent connections respect limits (#581) +- Make concurrent connections respect limits (`#581 `_) -- Collect history of responses if redirects occur (#614) +- Collect history of responses if redirects occur (`#614 `_) -- Enable passing pre-compressed data in requests (#621) +- Enable passing pre-compressed data in requests (`#621 `_) -- Expose named routes via UrlDispatcher.named_routes() (#622) +- Expose named routes via UrlDispatcher.named_routes() (`#622 `_) -- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (#629) +- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (`#629 `_) - Use ensure_future if available -- Always quote params for Content-Disposition (#641) +- Always quote params for Content-Disposition (`#641 `_) -- Support async for in multipart reader (#640) +- Support async for in multipart reader (`#640 `_) -- Add Timeout context manager (#611) +- Add Timeout context manager (`#611 `_) 0.18.4 (13-11-2015) =================== @@ -1602,12 +1602,12 @@ Misc 0.18.3 (25-10-2015) =================== -- Fix formatting for _RequestContextManager helper (#590) +- Fix formatting for _RequestContextManager helper (`#590 `_) 0.18.2 (22-10-2015) =================== -- Fix regression for OpenSSL < 1.0.0 (#583) +- Fix regression for OpenSSL < 1.0.0 (`#583 `_) 0.18.1 (20-10-2015) =================== @@ -1619,7 +1619,7 @@ Misc =================== - Use errors.HttpProcessingError.message as HTTP error reason and - message (#459) + message (`#459 `_) - Optimize cythonized multidict a bit @@ -1627,27 +1627,27 @@ Misc - default headers in ClientSession are now case-insensitive -- Make '=' char and 'wss://' schema safe in urls (#477) +- Make '=' char and 'wss://' schema safe in urls (`#477 `_) -- `ClientResponse.close()` forces connection closing by default from now (#479) +- `ClientResponse.close()` forces connection closing by default from now (`#479 `_) N.B. 
Backward incompatible change: was `.close(force=False) Using `force` parameter for the method is deprecated: use `.release()` instead. -- Properly requote URL's path (#480) +- Properly requote URL's path (`#480 `_) -- add `skip_auto_headers` parameter for client API (#486) +- add `skip_auto_headers` parameter for client API (`#486 `_) -- Properly parse URL path in aiohttp.web.Request (#489) +- Properly parse URL path in aiohttp.web.Request (`#489 `_) -- Raise RuntimeError when chunked enabled and HTTP is 1.0 (#488) +- Raise RuntimeError when chunked enabled and HTTP is 1.0 (`#488 `_) -- Fix a bug with processing io.BytesIO as data parameter for client API (#500) +- Fix a bug with processing io.BytesIO as data parameter for client API (`#500 `_) -- Skip auto-generation of Content-Type header (#507) +- Skip auto-generation of Content-Type header (`#507 `_) -- Use sendfile facility for static file handling (#503) +- Use sendfile facility for static file handling (`#503 `_) - Default `response_factory` in `app.router.add_static` now is `StreamResponse`, not `None`. The functionality is not changed if @@ -1656,17 +1656,17 @@ Misc - Drop `ClientResponse.message` attribute, it was always implementation detail. - Streams are optimized for speed and mostly memory in case of a big - HTTP message sizes (#496) + HTTP message sizes (`#496 `_) - Fix a bug for server-side cookies for dropping cookie and setting it again without Max-Age parameter. -- Don't trim redirect URL in client API (#499) +- Don't trim redirect URL in client API (`#499 `_) -- Extend precision of access log "D" to milliseconds (#527) +- Extend precision of access log "D" to milliseconds (`#527 `_) - Deprecate `StreamResponse.start()` method in favor of - `StreamResponse.prepare()` coroutine (#525) + `StreamResponse.prepare()` coroutine (`#525 `_) `.start()` is still supported but responses begun with `.start()` does not call signal for response preparing to be sent. 
@@ -1674,48 +1674,48 @@ Misc - Add `StreamReader.__repr__` - Drop Python 3.3 support, from now minimal required version is Python - 3.4.1 (#541) + 3.4.1 (`#541 `_) -- Add `async with` support for `ClientSession.request()` and family (#536) +- Add `async with` support for `ClientSession.request()` and family (`#536 `_) -- Ignore message body on 204 and 304 responses (#505) +- Ignore message body on 204 and 304 responses (`#505 `_) -- `TCPConnector` processed both IPv4 and IPv6 by default (#559) +- `TCPConnector` processed both IPv4 and IPv6 by default (`#559 `_) -- Add `.routes()` view for urldispatcher (#519) +- Add `.routes()` view for urldispatcher (`#519 `_) -- Route name should be a valid identifier name from now (#567) +- Route name should be a valid identifier name from now (`#567 `_) -- Implement server signals (#562) +- Implement server signals (`#562 `_) - Drop a year-old deprecated *files* parameter from client API. -- Added `async for` support for aiohttp stream (#542) +- Added `async for` support for aiohttp stream (`#542 `_) 0.17.4 (09-29-2015) =================== -- Properly parse URL path in aiohttp.web.Request (#489) +- Properly parse URL path in aiohttp.web.Request (`#489 `_) - Add missing coroutine decorator, the client api is await-compatible now 0.17.3 (08-28-2015) =================== -- Remove Content-Length header on compressed responses (#450) +- Remove Content-Length header on compressed responses (`#450 `_) - Support Python 3.5 -- Improve performance of transport in-use list (#472) +- Improve performance of transport in-use list (`#472 `_) -- Fix connection pooling (#473) +- Fix connection pooling (`#473 `_) 0.17.2 (08-11-2015) =================== -- Don't forget to pass `data` argument forward (#462) +- Don't forget to pass `data` argument forward (`#462 `_) -- Fix multipart read bytes count (#463) +- Fix multipart read bytes count (`#463 `_) 0.17.1 (08-10-2015) =================== @@ -1725,28 +1725,28 @@ Misc 0.17.0 (08-04-2015) 
=================== -- Make StaticRoute support Last-Modified and If-Modified-Since headers (#386) +- Make StaticRoute support Last-Modified and If-Modified-Since headers (`#386 `_) - Add Request.if_modified_since and Stream.Response.last_modified properties -- Fix deflate compression when writing a chunked response (#395) +- Fix deflate compression when writing a chunked response (`#395 `_) - Request`s content-length header is cleared now after redirect from - POST method (#391) + POST method (`#391 `_) -- Return a 400 if server received a non HTTP content (#405) +- Return a 400 if server received a non HTTP content (`#405 `_) -- Fix keep-alive support for aiohttp clients (#406) +- Fix keep-alive support for aiohttp clients (`#406 `_) -- Allow gzip compression in high-level server response interface (#403) +- Allow gzip compression in high-level server response interface (`#403 `_) -- Rename TCPConnector.resolve and family to dns_cache (#415) +- Rename TCPConnector.resolve and family to dns_cache (`#415 `_) -- Make UrlDispatcher ignore quoted characters during url matching (#414) +- Make UrlDispatcher ignore quoted characters during url matching (`#414 `_) Backward-compatibility warning: this may change the url matched by - your queries if they send quoted character (like %2F for /) (#414) + your queries if they send quoted character (like %2F for /) (`#414 `_) -- Use optional cchardet accelerator if present (#418) +- Use optional cchardet accelerator if present (`#418 `_) - Borrow loop from Connector in ClientSession if loop is not set @@ -1755,50 +1755,50 @@ Misc - Add toplevel get(), post(), put(), head(), delete(), options(), patch() coroutines. 
-- Fix IPv6 support for client API (#425) +- Fix IPv6 support for client API (`#425 `_) -- Pass SSL context through proxy connector (#421) +- Pass SSL context through proxy connector (`#421 `_) - Make the rule: path for add_route should start with slash - Don't process request finishing by low-level server on closed event loop -- Don't override data if multiple files are uploaded with same key (#433) +- Don't override data if multiple files are uploaded with same key (`#433 `_) - Ensure multipart.BodyPartReader.read_chunk read all the necessary data to avoid false assertions about malformed multipart payload -- Don't send body for 204, 205 and 304 http exceptions (#442) +- Don't send body for 204, 205 and 304 http exceptions (`#442 `_) -- Correctly skip Cython compilation in MSVC not found (#453) +- Correctly skip Cython compilation in MSVC not found (`#453 `_) -- Add response factory to StaticRoute (#456) +- Add response factory to StaticRoute (`#456 `_) -- Don't append trailing CRLF for multipart.BodyPartReader (#454) +- Don't append trailing CRLF for multipart.BodyPartReader (`#454 `_) 0.16.6 (07-15-2015) =================== -- Skip compilation on Windows if vcvarsall.bat cannot be found (#438) +- Skip compilation on Windows if vcvarsall.bat cannot be found (`#438 `_) 0.16.5 (06-13-2015) =================== -- Get rid of all comprehensions and yielding in _multidict (#410) +- Get rid of all comprehensions and yielding in _multidict (`#410 `_) 0.16.4 (06-13-2015) =================== - Don't clear current exception in multidict's `__repr__` (cythonized - versions) (#410) + versions) (`#410 `_) 0.16.3 (05-30-2015) =================== -- Fix StaticRoute vulnerability to directory traversal attacks (#380) +- Fix StaticRoute vulnerability to directory traversal attacks (`#380 `_) 0.16.2 (05-27-2015) @@ -1808,26 +1808,26 @@ Misc 3.4.1 instead of 3.4.0 - Add check for presence of loop.is_closed() method before call the - former (#378) + former (`#378 `_) 0.16.1 
(05-27-2015) =================== -- Fix regression in static file handling (#377) +- Fix regression in static file handling (`#377 `_) 0.16.0 (05-26-2015) =================== -- Unset waiter future after cancellation (#363) +- Unset waiter future after cancellation (`#363 `_) -- Update request url with query parameters (#372) +- Update request url with query parameters (`#372 `_) - Support new `fingerprint` param of TCPConnector to enable verifying - SSL certificates via MD5, SHA1, or SHA256 digest (#366) + SSL certificates via MD5, SHA1, or SHA256 digest (`#366 `_) - Setup uploaded filename if field value is binary and transfer - encoding is not specified (#349) + encoding is not specified (`#349 `_) - Implement `ClientSession.close()` method @@ -1842,20 +1842,20 @@ Misc - Add `__del__` to client-side objects: sessions, connectors, connections, requests, responses. -- Refactor connections cleanup by connector (#357) +- Refactor connections cleanup by connector (`#357 `_) -- Add `limit` parameter to connector constructor (#358) +- Add `limit` parameter to connector constructor (`#358 `_) -- Add `request.has_body` property (#364) +- Add `request.has_body` property (`#364 `_) -- Add `response_class` parameter to `ws_connect()` (#367) +- Add `response_class` parameter to `ws_connect()` (`#367 `_) - `ProxyConnector` does not support keep-alive requests by default - starting from now (#368) + starting from now (`#368 `_) - Add `connector.force_close` property -- Add ws_connect to ClientSession (#374) +- Add ws_connect to ClientSession (`#374 `_) - Support optional `chunk_size` parameter in `router.add_static()` @@ -1865,7 +1865,7 @@ Misc - Fix graceful shutdown handling -- Fix `Expect` header handling for not found and not allowed routes (#340) +- Fix `Expect` header handling for not found and not allowed routes (`#340 `_) 0.15.2 (04-19-2015) @@ -1877,15 +1877,15 @@ Misc - Allow to match any request method with `*` -- Explicitly call drain on transport (#316) +- 
Explicitly call drain on transport (`#316 `_) -- Make chardet module dependency mandatory (#318) +- Make chardet module dependency mandatory (`#318 `_) -- Support keep-alive for HTTP 1.0 (#325) +- Support keep-alive for HTTP 1.0 (`#325 `_) -- Do not chunk single file during upload (#327) +- Do not chunk single file during upload (`#327 `_) -- Add ClientSession object for cookie storage and default headers (#328) +- Add ClientSession object for cookie storage and default headers (`#328 `_) - Add `keep_alive_on` argument for HTTP server handler. @@ -1911,13 +1911,13 @@ Misc - Client WebSockets support -- New Multipart system (#273) +- New Multipart system (`#273 `_) -- Support for "Except" header (#287) (#267) +- Support for "Except" header (`#287 `_) (`#267 `_) -- Set default Content-Type for post requests (#184) +- Set default Content-Type for post requests (`#184 `_) -- Fix issue with construction dynamic route with regexps and trailing slash (#266) +- Fix issue with construction dynamic route with regexps and trailing slash (`#266 `_) - Add repr to web.Request @@ -1927,7 +1927,7 @@ Misc - Add repr for web.Application -- Add repr to UrlMappingMatchInfo (#217) +- Add repr to UrlMappingMatchInfo (`#217 `_) - Gunicorn 19.2.x compatibility @@ -1935,29 +1935,29 @@ Misc 0.14.4 (01-29-2015) =================== -- Fix issue with error during constructing of url with regex parts (#264) +- Fix issue with error during constructing of url with regex parts (`#264 `_) 0.14.3 (01-28-2015) =================== -- Use path='/' by default for cookies (#261) +- Use path='/' by default for cookies (`#261 `_) 0.14.2 (01-23-2015) =================== -- Connections leak in BaseConnector (#253) +- Connections leak in BaseConnector (`#253 `_) -- Do not swallow websocket reader exceptions (#255) +- Do not swallow websocket reader exceptions (`#255 `_) -- web.Request's read, text, json are memorized (#250) +- web.Request's read, text, json are memorized (`#250 `_) 0.14.1 (01-15-2015) 
=================== -- HttpMessage._add_default_headers does not overwrite existing headers (#216) +- HttpMessage._add_default_headers does not overwrite existing headers (`#216 `_) - Expose multidict classes at package level @@ -1996,21 +1996,21 @@ Misc - Server has 75 seconds keepalive timeout now, was non-keepalive by default. -- Application does not accept `**kwargs` anymore ((#243)). +- Application does not accept `**kwargs` anymore ((`#243 `_)). - Request is inherited from dict now for making per-request storage to - middlewares ((#242)). + middlewares ((`#242 `_)). 0.13.1 (12-31-2014) =================== -- Add `aiohttp.web.StreamResponse.started` property (#213) +- Add `aiohttp.web.StreamResponse.started` property (`#213 `_) - HTML escape traceback text in `ServerHttpProtocol.handle_error` - Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request` - on error ((#218)) + on error ((`#218 `_)) 0.13.0 (12-29-2014) @@ -2020,16 +2020,16 @@ Misc - Chain exceptions when raise `ClientRequestError`. -- Support custom regexps in route variables (#204) +- Support custom regexps in route variables (`#204 `_) - Fixed graceful shutdown, disable keep-alive on connection closing. - Decode HTTP message with `utf-8` encoding, some servers send headers - in utf-8 encoding (#207) + in utf-8 encoding (`#207 `_) -- Support `aiohtt.web` middlewares (#209) +- Support `aiohtt.web` middlewares (`#209 `_) -- Add ssl_context to TCPConnector (#206) +- Add ssl_context to TCPConnector (`#206 `_) 0.12.0 (12-12-2014) @@ -2039,7 +2039,7 @@ Misc Sorry, we have to do this. 
- Automatically force aiohttp.web handlers to coroutines in - `UrlDispatcher.add_route()` (#186) + `UrlDispatcher.add_route()` (`#186 `_) - Rename `Request.POST()` function to `Request.post()` @@ -2068,15 +2068,15 @@ Misc 0.11.0 (11-29-2014) =================== -- Support named routes in `aiohttp.web.UrlDispatcher` (#179) +- Support named routes in `aiohttp.web.UrlDispatcher` (`#179 `_) -- Make websocket subprotocols conform to spec (#181) +- Make websocket subprotocols conform to spec (`#181 `_) 0.10.2 (11-19-2014) =================== -- Don't unquote `environ['PATH_INFO']` in wsgi.py (#177) +- Don't unquote `environ['PATH_INFO']` in wsgi.py (`#177 `_) 0.10.1 (11-17-2014) @@ -2102,54 +2102,54 @@ Misc from 'Can not read status line' to explicit 'Connection closed by server' -- Drop closed connections from connector (#173) +- Drop closed connections from connector (`#173 `_) -- Set server.transport to None on .closing() (#172) +- Set server.transport to None on .closing() (`#172 `_) 0.9.3 (10-30-2014) ================== -- Fix compatibility with asyncio 3.4.1+ (#170) +- Fix compatibility with asyncio 3.4.1+ (`#170 `_) 0.9.2 (10-16-2014) ================== -- Improve redirect handling (#157) +- Improve redirect handling (`#157 `_) -- Send raw files as is (#153) +- Send raw files as is (`#153 `_) -- Better websocket support (#150) +- Better websocket support (`#150 `_) 0.9.1 (08-30-2014) ================== -- Added MultiDict support for client request params and data (#114). +- Added MultiDict support for client request params and data (`#114 `_). -- Fixed parameter type for IncompleteRead exception (#118). +- Fixed parameter type for IncompleteRead exception (`#118 `_). -- Strictly require ASCII headers names and values (#137) +- Strictly require ASCII headers names and values (`#137 `_) -- Keep port in ProxyConnector (#128). +- Keep port in ProxyConnector (`#128 `_). -- Python 3.4.1 compatibility (#131). +- Python 3.4.1 compatibility (`#131 `_). 
0.9.0 (07-08-2014) ================== -- Better client basic authentication support (#112). +- Better client basic authentication support (`#112 `_). -- Fixed incorrect line splitting in HttpRequestParser (#97). +- Fixed incorrect line splitting in HttpRequestParser (`#97 `_). - Support StreamReader and DataQueue as request data. -- Client files handling refactoring (#20). +- Client files handling refactoring (`#20 `_). - Backward incompatible: Replace DataQueue with StreamReader for - request payload (#87). + request payload (`#87 `_). 0.8.4 (07-04-2014) @@ -2272,7 +2272,7 @@ Misc - Better support for server exit. -- Read response body until EOF if content-length is not defined (#14) +- Read response body until EOF if content-length is not defined (`#14 `_) 0.6.2 (02-18-2014) From 96e09c6a5c456475ff20ba2fd4c02549b0e56c66 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:13:16 +0300 Subject: [PATCH 004/144] Add changelog links fixer --- tools/fix_changelog.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 tools/fix_changelog.py diff --git a/tools/fix_changelog.py b/tools/fix_changelog.py new file mode 100644 index 00000000000..e5a73f33219 --- /dev/null +++ b/tools/fix_changelog.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +import argparse +import re +import sys +from pathlib import Path + + +PATTERN = re.compile("\(#(\d+)\)") + + +def get_root(script_path): + folder = script_path.absolute().parent + while not (folder / '.git').exists(): + folder = folder.parent + if folder == folder.anchor: + raise RuntimeError("git repo not found") + return folder + + +def main(argv): + parser = argparse.ArgumentParser(description='Expand github links.') + parser.add_argument('filename', default='CHANGES.rst', nargs='?', + help="filename to proess") + args = parser.parse_args() + here = Path(argv[0]) + root = get_root(here) + fname = root / args.filename + + content = fname.read_text() + new = PATTERN.sub( 
+ r'(`#\1 `_)', + content) + + fname.write_text(new) + print(f"Fixed links in {fname}") + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) From 77f5633ce02da71c6879d3b25b5fbe8b240647c6 Mon Sep 17 00:00:00 2001 From: Andrew Svetlov Date: Fri, 13 Apr 2018 12:29:16 +0300 Subject: [PATCH 005/144] Fix changenote --- CHANGES.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 02689ac2a1b..ce362dbf32c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -115,8 +115,6 @@ Bugfixes Improved Documentation ---------------------- -- Change ``ClientResponse.json()`` documentation to reflect that it now - allows "application/xxx+json" content-types (`#2206 `_) - Document behavior when cchardet detects encodings that are unknown to Python. (`#2732 `_) - Add diagrams for tracing request life style. (`#2748 `_) From 68c9697dade4933a591673fdeb5036af3dfb44fa Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 18 Nov 2023 15:35:19 +0000 Subject: [PATCH 006/144] Bump to 3.9.0.dev0 (#7847) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 538ce0a1038..f257f2818d4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.0" +__version__ = "3.9.0.dev0" from typing import TYPE_CHECKING, Tuple From c491ed7f1cde390b3cc6f43b27aac682a3c8f021 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 19 Nov 2023 14:38:19 +0000 Subject: [PATCH 007/144] [PR #7850/22170b21 backport][3.9] Fix import under PyPy 3.8/3.9 on Windows (#7854) **This is a backport of PR #7850 as merged into master (22170b21064be8fdf75b947d9c2930df7b2518e1).** Fixes #7848. 
Co-authored-by: Jelle Zijlstra --- CHANGES/7848.bugfix | 1 + aiohttp/cookiejar.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/7848.bugfix diff --git a/CHANGES/7848.bugfix b/CHANGES/7848.bugfix new file mode 100644 index 00000000000..13a29e2a226 --- /dev/null +++ b/CHANGES/7848.bugfix @@ -0,0 +1 @@ +Fix importing aiohttp under PyPy 3.8 and 3.9 on Windows. diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 15dd982c960..a348f112cb5 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -62,9 +62,10 @@ class CookieJar(AbstractCookieJar): ) try: calendar.timegm(time.gmtime(MAX_TIME)) - except OSError: + except (OSError, ValueError): # Hit the maximum representable time on Windows # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 + # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) except OverflowError: # #4515: datetime.max may not be representable on 32-bit platforms From 6034e261d1587e12bf5c5f4e9cd94a8c95acfc46 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 19 Nov 2023 16:04:43 +0000 Subject: [PATCH 008/144] [PR #7852/122597fc backport][3.9] Add requirement files to sdist (#7856) **This is a backport of PR #7852 as merged into master (122597fce2cf9a1d4c0b2ebaa0a8be567f19816c).** Co-authored-by: Marcel Telka --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 05084efddb9..d7c5cef6aad 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -7,6 +7,7 @@ graft aiohttp graft docs graft examples graft tests +graft requirements recursive-include vendor * global-include aiohttp *.pyi global-exclude *.pyc From 8d766df8085969eab3bd65cb253bb9f3ed52decd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Nov 2023 11:04:35 +0000 Subject: 
[PATCH 009/144] Bump yarl from 1.9.2 to 1.9.3 (#7860) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [yarl](https://github.com/aio-libs/yarl) from 1.9.2 to 1.9.3.
Release notes

Sourced from yarl's releases.

1.9.3

:bug: Bug fixes

  • Stopped dropping trailing slashes in yarl.URL.joinpath() -- by [@​gmacon]. (#862, #866)
  • Started accepting string subclasses in __truediv__() operations (URL / segment) -- by [@​mjpieters]. (#871, #884)
  • Fixed the human representation of URLs with square brackets in usernames and passwords -- by @​mjpieters. (#876, #882)
  • Updated type hints to include URL.missing_port(), URL.__bytes__() and the encoding argument to yarl.URL.joinpath() -- by @​mjpieters. (#891)

:package: Packaging updates and notes for downstreams

  • Integrated Cython 3 to enable building yarl under Python 3.12 -- by @​mjpieters. (#829, #881)

  • Added the changelog URL to the dist metadata -- by [@​scop]. (#877)

  • Declared modern setuptools.build_meta as the :pep:517 build backend in pyproject.toml explicitly -- by [@​webknjaz]. (#886)

  • Converted most of the packaging setup into a declarative setup.cfg config -- by @​webknjaz. (#890)

  • Replaced the old-fashioned setup.py packaging with an in-tree :pep:517 build backend -- by @​webknjaz.

    Whenever the end-users or downstream packagers need to build yarl from source (a Git checkout or an sdist), they may pass a config_settings flag --pure-python. If this flag is not set, a C-extension will be built and included into the distribution.

    Here is how this can be done with pip:

    $ python -m pip install .
    --config-settings=--pure-python=
    

    This will also work with -e | --editable.

    The same can be achieved via pypa/build:

    $ python -m build
    --config-setting=--pure-python=
    

    Adding -w | --wheel can force pypa/build produce a wheel from source directly, as opposed to building an sdist and then building from it. (#893)

  • Declared Python 3.12 supported officially in the distribution package metadata -- by [@​edgarrmondragon]. (#942)

:hammer_and_wrench: Contributor-facing changes

  • A regression test for no-host URLs was added per #821 and RFC 3986 -- by [@​kenballus]. (#821, #822)

  • Started testing yarl against Python 3.12 in CI -- by @​mjpieters. (#881)

  • All Python 3.12 jobs are now marked as required to pass in CI -- by @​edgarrmondragon. (#942)

  • MyST is now integrated in Sphinx -- by @​webknjaz.

    This allows the contributors to author new documents in Markdown when they have difficulties with going straight RST. (#953)

:muscle: New Contributors

... (truncated)

Changelog

Sourced from yarl's changelog.

1.9.3 (2023-11-20)

Bug fixes

  • Stopped dropping trailing slashes in :py:meth:~yarl.URL.joinpath -- by :user:gmacon. (:issue:862, :issue:866)
  • Started accepting string subclasses in __truediv__() operations (URL / segment) -- by :user:mjpieters. (:issue:871, :issue:884)
  • Fixed the human representation of URLs with square brackets in usernames and passwords -- by :user:mjpieters. (:issue:876, :issue:882)
  • Updated type hints to include URL.missing_port(), URL.__bytes__() and the encoding argument to :py:meth:~yarl.URL.joinpath -- by :user:mjpieters. (:issue:891)

Packaging updates and notes for downstreams

  • Integrated Cython 3 to enable building yarl under Python 3.12 -- by :user:mjpieters. (:issue:829, :issue:881)

  • Declared modern setuptools.build_meta as the :pep:517 build backend in :file:pyproject.toml explicitly -- by :user:webknjaz. (:issue:886)

  • Converted most of the packaging setup into a declarative :file:setup.cfg config -- by :user:webknjaz. (:issue:890)

  • Replaced the old-fashioned :file:setup.py packaging with an in-tree :pep:517 build backend -- by :user:webknjaz.

    Whenever the end-users or downstream packagers need to build yarl from source (a Git checkout or an sdist), they may pass a config_settings flag --pure-python. If this flag is not set, a C-extension will be built and included into the distribution.

    Here is how this can be done with pip:

    .. code-block:: console

    $ python -m pip install . --config-settings=--pure-python=
    

    This will also work with -e | --editable.

    The same can be achieved via pypa/build:

    .. code-block:: console

    $ python -m build --config-setting=--pure-python=
    

    Adding -w | --wheel can force pypa/build produce a wheel from source directly, as opposed to building an sdist and then building from it. (:issue:893)

  • Declared Python 3.12 supported officially in the distribution package metadata -- by :user:edgarrmondragon. (:issue:942)

... (truncated)

Commits
  • 0698dc9 ⇪📦 Release yarl v1.9.3
  • 690b54c 💄 Mark the GHA workflow as CI/CD
  • 1addb55 🐛🧪 Fix issue replacement in changelog @ CI
  • c2ab1e9 🎨 Pass explicit --no-color to Pip in CI
  • f202823 🐛🧪 Unset FORCE_COLOR on Bash level @ CI
  • ef5664b 💡Use NO_COLOR to override FORCE_COLOR @ pip
  • d1d9f6e Merge PR #956
  • e14d10e Attempt disabling FORCE_COLOR env var w/ YAML null
  • 11276e0 📦Explicitly list NOTICE @ wheel license files
  • 180c2b2 🐛Uncolorize pip JSON report in whl compat cmd
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.9.2&new-version=1.9.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 4 +--- requirements/dev.txt | 7 ++----- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 6 insertions(+), 11 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 89d5aec9195..77943e4e44a 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -38,5 +38,5 @@ typing-extensions==4.7.1 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index df021579ea0..f03b34c9f7d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -188,7 +188,6 @@ setuptools-git==1.2 six==1.16.0 # via # python-dateutil - # sphinx # virtualenv slotscheck==0.17.1 # via -r requirements/lint.in @@ -266,7 +265,7 @@ webcolors==1.11.1 # via blockdiag wheel==0.37.0 # via pip-tools -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in zipp==3.17.0 # via @@ -280,4 +279,3 @@ setuptools==68.0.0 # via # blockdiag # pip-tools - # sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 605327c4e94..34a79cc5636 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -177,9 +177,7 @@ requests==2.31.0 setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 - # via - # python-dateutil - # sphinx + # via python-dateutil slotscheck==0.17.1 # via -r requirements/lint.in snowballstemmer==2.2.0 @@ -254,7 +252,7 @@ webcolors==1.13 # via blockdiag wheel==0.41.0 # via pip-tools -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in zipp==3.17.0 # via @@ -269,4 +267,3 @@ setuptools==68.0.0 # blockdiag # nodeenv # pip-tools - # sphinx diff --git 
a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 9d1e47b2e54..a0f2aa861f7 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -30,5 +30,5 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 0f8a7ef8ee1..661a66528d2 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -124,5 +124,5 @@ uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.2 +yarl==1.9.3 # via -r requirements/runtime-deps.in From cede54b211b96af860944150b3077bf121f3d937 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 22 Nov 2023 19:15:19 +0000 Subject: [PATCH 010/144] [PR #7863/3a21134a backport][3.9] remove tests/__init__.py (#7870) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #7863 as merged into master (3a21134a0e3e8a163faa7436383e92da08415f13).** Co-authored-by: Robert Schütz --- tests/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests/__init__.py diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 From 5fa260aaa35d81dcbbeb15ee3891337870f77c99 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 23 Nov 2023 12:10:36 +0100 Subject: [PATCH 011/144] Fix ClientResponse.close releasing the connection instead of closing (#7869) (#7873) (cherry picked from commit 25ef450238864d06ba8a44227080e4a7b354ba76) --- CHANGES/7869.bugfix | 1 + aiohttp/client_reqrep.py | 4 +++- tests/test_client_functional.py | 36 ++++++++++++++++++++++++++++++++- 3 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7869.bugfix diff --git a/CHANGES/7869.bugfix b/CHANGES/7869.bugfix new file mode 100644 index 00000000000..23282fc3bb4 --- /dev/null +++ b/CHANGES/7869.bugfix @@ -0,0 +1 @@ +Fix ClientResponse.close releasing the connection instead of closing diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4cea7466d8d..0ab84743658 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1033,7 +1033,9 @@ def close(self) -> None: return self._cleanup_writer() - self._release_connection() + if self._connection is not None: + self._connection.close() + self._connection = None def release(self) -> Any: if not self._released: diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index c5379e74a4b..6698ac6ef52 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -19,6 +19,7 @@ from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver from aiohttp.client_exceptions import TooManyRedirects +from aiohttp.pytest_plugin import AiohttpClient, TestClient from aiohttp.test_utils import unused_port @@ -3186,7 +3187,40 @@ async def handler(request): await client.get("/") -async def test_read_timeout_on_prepared_response(aiohttp_client) -> None: +async def test_read_timeout_closes_connection(aiohttp_client: AiohttpClient) -> None: + request_count = 0 + + async def handler(request): + nonlocal request_count + request_count += 1 + if request_count < 3: + await asyncio.sleep(0.5) + return 
web.Response(body=f"request:{request_count}") + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + timeout = aiohttp.ClientTimeout(total=0.1) + client: TestClient = await aiohttp_client(app, timeout=timeout) + with pytest.raises(asyncio.TimeoutError): + await client.get("/") + + # Make sure its really closed + assert not client.session.connector._conns + + with pytest.raises(asyncio.TimeoutError): + await client.get("/") + + # Make sure its really closed + assert not client.session.connector._conns + result = await client.get("/") + assert await result.read() == b"request:3" + + # Make sure its not closed + assert client.session.connector._conns + + +async def test_read_timeout_on_prepared_response(aiohttp_client: Any) -> None: async def handler(request): resp = aiohttp.web.StreamResponse() await resp.prepare(request) From a04970150c6ce9fda22c9f63d947845f79148b4c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 23 Nov 2023 14:31:30 +0100 Subject: [PATCH 012/144] Fix usage of proxy.py in test_proxy_functional (#7773) (#7876) (cherry picked from commit 4d9fc636dbad45678330f17b7d82b75cf91247bf) --- tests/test_proxy_functional.py | 76 ++++++++++++++-------------------- 1 file changed, 30 insertions(+), 46 deletions(-) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index 61e30841cc1..de5eeb258ff 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -1,8 +1,8 @@ import asyncio -import functools import os import pathlib -import platform +import ssl +import sys from re import match as match_regex from unittest import mock from uuid import uuid4 @@ -13,8 +13,8 @@ import aiohttp from aiohttp import web -from aiohttp.client_exceptions import ClientConnectionError, ClientProxyConnectionError -from aiohttp.helpers import IS_MACOS, IS_WINDOWS, PY_310 +from aiohttp.client_exceptions import ClientConnectionError +from aiohttp.helpers import IS_MACOS, IS_WINDOWS pytestmark = [ 
pytest.mark.filterwarnings( @@ -28,20 +28,7 @@ ] -secure_proxy_xfail_under_py310_linux = functools.partial( - pytest.mark.xfail, - PY_310 and platform.system() == "Linux", - reason=( - "The secure proxy fixture does not seem to work " - "under Python 3.10 on Linux. " - "See https://github.com/abhinavsingh/proxy.py/issues/622." - ), -) - -ASYNCIO_SUPPORTS_TLS_IN_TLS = hasattr( - asyncio.sslproto._SSLProtocolTransport, - "_start_tls_compatible", -) +ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11) @pytest.fixture @@ -51,6 +38,9 @@ def secure_proxy_url(tls_certificate_pem_path): This fixture also spawns that instance and tears it down after the test. """ proxypy_args = [ + # --threadless does not work on windows, see + # https://github.com/abhinavsingh/proxy.py/issues/492 + "--threaded" if os.name == "nt" else "--threadless", "--num-workers", "1", # the tests only send one query anyway "--hostname", @@ -112,32 +102,20 @@ async def handler(*args, **kwargs): ) -@pytest.fixture -def _pretend_asyncio_supports_tls_in_tls( - monkeypatch, - web_server_endpoint_type, -): - if web_server_endpoint_type != "https" or ASYNCIO_SUPPORTS_TLS_IN_TLS: - return - - # for https://github.com/python/cpython/pull/28073 - # and https://bugs.python.org/issue37179 - monkeypatch.setattr( - asyncio.sslproto._SSLProtocolTransport, - "_start_tls_compatible", - True, - raising=False, - ) - - -@secure_proxy_xfail_under_py310_linux(raises=ClientProxyConnectionError) +@pytest.mark.skipif( + not ASYNCIO_SUPPORTS_TLS_IN_TLS, + reason="asyncio on this python does not support TLS in TLS", +) @pytest.mark.parametrize("web_server_endpoint_type", ("http", "https")) -@pytest.mark.usefixtures("_pretend_asyncio_supports_tls_in_tls", "loop") +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die 
with an error. async def test_secure_https_proxy_absolute_path( - client_ssl_ctx, - secure_proxy_url, - web_server_endpoint_url, - web_server_endpoint_payload, + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, + web_server_endpoint_url: str, + web_server_endpoint_payload: str, ) -> None: """Ensure HTTP(S) sites are accessible through a secure proxy.""" conn = aiohttp.TCPConnector() @@ -160,13 +138,19 @@ async def test_secure_https_proxy_absolute_path( await asyncio.sleep(0.1) -@secure_proxy_xfail_under_py310_linux(raises=AssertionError) @pytest.mark.parametrize("web_server_endpoint_type", ("https",)) @pytest.mark.usefixtures("loop") +@pytest.mark.skipif( + ASYNCIO_SUPPORTS_TLS_IN_TLS, reason="asyncio on this python supports TLS in TLS" +) +@pytest.mark.filterwarnings(r"ignore:.*ssl.OP_NO_SSL*") +# Filter out the warning from +# https://github.com/abhinavsingh/proxy.py/blob/30574fd0414005dfa8792a6e797023e862bdcf43/proxy/common/utils.py#L226 +# otherwise this test will fail because the proxy will die with an error. async def test_https_proxy_unsupported_tls_in_tls( - client_ssl_ctx, - secure_proxy_url, - web_server_endpoint_type, + client_ssl_ctx: ssl.SSLContext, + secure_proxy_url: URL, + web_server_endpoint_type: str, ) -> None: """Ensure connecting to TLS endpoints w/ HTTPS proxy needs patching. From 41a9f1f5b9b2630b1f4b971811c7ef8f016262fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Nov 2023 11:07:22 +0000 Subject: [PATCH 013/144] Bump mypy from 1.7.0 to 1.7.1 (#7882) Bumps [mypy](https://github.com/python/mypy) from 1.7.0 to 1.7.1.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.7.0&new-version=1.7.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f03b34c9f7d..37367af4773 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -108,7 +108,7 @@ multidict==6.0.4 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 34a79cc5636..9639c875dc1 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -103,7 +103,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index e90f801862d..28d0bf65778 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 661a66528d2..6451eb45cfc 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,7 +53,7 @@ multidict==6.0.4 # via # -r requirements/runtime-deps.in # yarl -mypy==1.7.0 ; implementation_name == "cpython" +mypy==1.7.1 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From 6dd0122417f00ed4b2b353226a1b164b6463a245 Mon Sep 17 00:00:00 2001 From: Sam Bull 
Date: Fri, 24 Nov 2023 19:16:40 +0000 Subject: [PATCH 014/144] Update dependabot.yml (#7888) Sync from master --- .github/dependabot.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index cd8b2782b43..3b392a34b3b 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,22 +18,22 @@ updates: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for GitHub Actions aiohttp 3.8 + # Maintain dependencies for GitHub Actions aiohttp 3.9 - package-ecosystem: "github-actions" directory: "/" labels: - dependencies - target-branch: "3.8" + target-branch: "3.9" schedule: interval: "daily" open-pull-requests-limit: 10 - # Maintain dependencies for Python aiohttp 3.8 + # Maintain dependencies for Python aiohttp 3.10 - package-ecosystem: "pip" directory: "/" labels: - dependencies - target-branch: "3.8" + target-branch: "3.10" schedule: interval: "daily" open-pull-requests-limit: 10 From bb111012706d3ef9edc525be3d8d4df410ad847f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 24 Nov 2023 15:11:06 -0600 Subject: [PATCH 015/144] Restore async concurrency safety to websocket compressor (#7865) (#7889) Fixes #7859 (cherry picked from commit 86a23961531103ccc34853f67321c7d0f63797f5) --- CHANGES/7865.bugfix | 1 + aiohttp/compression_utils.py | 22 +++++++---- aiohttp/http_websocket.py | 26 ++++++++----- tests/test_websocket_writer.py | 67 +++++++++++++++++++++++++++++++++- 4 files changed, 97 insertions(+), 19 deletions(-) create mode 100644 CHANGES/7865.bugfix diff --git a/CHANGES/7865.bugfix b/CHANGES/7865.bugfix new file mode 100644 index 00000000000..9a46e124486 --- /dev/null +++ b/CHANGES/7865.bugfix @@ -0,0 +1 @@ +Restore async concurrency safety to websocket compressor diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index 52791fe5015..9631d377e9a 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -62,19 +62,25 @@ def __init__( self._compressor = zlib.compressobj( wbits=self._mode, strategy=strategy, level=level ) + self._compress_lock = asyncio.Lock() def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) async def compress(self, data: bytes) -> bytes: - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_event_loop().run_in_executor( - self._executor, self.compress_sync, data - ) - return self.compress_sync(data) + async with self._compress_lock: + # To ensure the stream is consistent in the event + # there are multiple writers, we need to lock + # the compressor so that only one writer can + # compress at a time. 
+ if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_event_loop().run_in_executor( + self._executor, self.compress_sync, data + ) + return self.compress_sync(data) def flush(self, mode: int = zlib.Z_FINISH) -> bytes: return self._compressor.flush(mode) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index a94ac2a73dd..f395a27614a 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -635,21 +635,17 @@ async def _send_frame( if (compress or self.compress) and opcode < 8: if compress: # Do not set self._compress if compressing is for this frame - compressobj = ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) + compressobj = self._make_compress_obj(compress) else: # self.compress if not self._compressobj: - self._compressobj = ZLibCompressor( - level=zlib.Z_BEST_SPEED, - wbits=-self.compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, - ) + self._compressobj = self._make_compress_obj(self.compress) compressobj = self._compressobj message = await compressobj.compress(message) + # Its critical that we do not return control to the event + # loop until we have finished sending all the compressed + # data. Otherwise we could end up mixing compressed frames + # if there are multiple coroutines compressing data. message += compressobj.flush( zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH ) @@ -687,10 +683,20 @@ async def _send_frame( self._output_size += len(header) + len(message) + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. 
+ if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() + def _make_compress_obj(self, compress: int) -> ZLibCompressor: + return ZLibCompressor( + level=zlib.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + def _write(self, data: bytes) -> None: if self.transport is None or self.transport.is_closing(): raise ConnectionResetError("Cannot write to closing transport") diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index fce3c330d27..8dbbc815fb7 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -1,9 +1,12 @@ +import asyncio import random +from typing import Any, Callable from unittest import mock import pytest -from aiohttp.http import WebSocketWriter +from aiohttp import DataQueue, WSMessage +from aiohttp.http import WebSocketReader, WebSocketWriter from aiohttp.test_utils import make_mocked_coro @@ -104,3 +107,65 @@ async def test_send_compress_text_per_message(protocol, transport) -> None: writer.transport.write.assert_called_with(b"\x81\x04text") await writer.send(b"text", compress=15) writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") + + +@pytest.mark.parametrize( + ("max_sync_chunk_size", "payload_point_generator"), + ( + (16, lambda count: count), + (4096, lambda count: count), + (32, lambda count: 64 + count if count % 2 else count), + ), +) +async def test_concurrent_messages( + protocol: Any, + transport: Any, + max_sync_chunk_size: int, + payload_point_generator: Callable[[int], int], +) -> None: + """Ensure messages are compressed correctly when there are multiple concurrent writers. 
+ + This test generates is parametrized to + + - Generate messages that are larger than patch + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 16 + where compression will run in the executor + + - Generate messages that are smaller than patch + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 4096 + where compression will run in the event loop + + - Interleave generated messages with a + WEBSOCKET_MAX_SYNC_CHUNK_SIZE of 32 + where compression will run in the event loop + and in the executor + """ + with mock.patch( + "aiohttp.http_websocket.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", max_sync_chunk_size + ): + writer = WebSocketWriter(protocol, transport, compress=15) + queue: DataQueue[WSMessage] = DataQueue(asyncio.get_running_loop()) + reader = WebSocketReader(queue, 50000) + writers = [] + payloads = [] + for count in range(1, 64 + 1): + point = payload_point_generator(count) + payload = bytes((point,)) * point + payloads.append(payload) + writers.append(writer.send(payload, binary=True)) + await asyncio.gather(*writers) + + for call in writer.transport.write.call_args_list: + call_bytes = call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + bytes_data: bytes = msg.data + first_char = bytes_data[0:1] + char_val = ord(first_char) + assert len(bytes_data) == char_val + # If we have a concurrency problem, the data + # tends to get mixed up between messages so + # we want to validate that all the bytes are + # the same value + assert bytes_data == bytes_data[0:1] * char_val From 2ae4d6ffdd015f622bfb75dee98ad629240cccc4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 25 Nov 2023 20:19:30 +0000 Subject: [PATCH 016/144] Message is not upgraded if Upgrade header is missing (#7895) (#7898) (cherry picked from commit fde031fe7b0d6060eab4ff13d588a882bb7a1ddb) --- CHANGES/7895.bugfix | 1 + aiohttp/http_parser.py | 3 ++- tests/test_http_parser.py | 9 +++++++++ 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 CHANGES/7895.bugfix diff --git 
a/CHANGES/7895.bugfix b/CHANGES/7895.bugfix new file mode 100644 index 00000000000..557df294d71 --- /dev/null +++ b/CHANGES/7895.bugfix @@ -0,0 +1 @@ +Fixed messages being reported as upgraded without an Upgrade header in Python parser. -- by :user:`Dreamsorcerer` diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index b435096c5c7..85499177701 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -510,7 +510,8 @@ def parse_headers( close_conn = True elif v == "keep-alive": close_conn = False - elif v == "upgrade": + # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols + elif v == "upgrade" and headers.get(hdrs.UPGRADE): upgrade = True # encoding diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a6b4988c452..820a76cb821 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -474,6 +474,15 @@ def test_conn_upgrade(parser: Any) -> None: assert upgrade +def test_bad_upgrade(parser) -> None: + """Test not upgraded if missing Upgrade header.""" + text = b"GET /test HTTP/1.1\r\nconnection: upgrade\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + msg = messages[0][0] + assert not msg.upgrade + assert not upgrade + + def test_compression_empty(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"content-encoding: \r\n\r\n" messages, upgrade, tail = parser.feed_data(text) From ddc2a26c9e0c43fd1229e4424f2a30d1b10ced13 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 25 Nov 2023 21:26:54 +0000 Subject: [PATCH 017/144] [PR #7896/9a7cfe77 backport][3.9] Fix some flaky tests (#7900) **This is a backport of PR #7896 as merged into master (9a7cfe77623b9a61e4e58f425fff99529de2f795).** Co-authored-by: Sam Bull --- tests/test_web_server.py | 7 +++++-- tests/test_web_urldispatcher.py | 12 ++++++------ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/tests/test_web_server.py b/tests/test_web_server.py index 
73e69831991..d0fd95acdb4 100644 --- a/tests/test_web_server.py +++ b/tests/test_web_server.py @@ -219,9 +219,11 @@ async def test_no_handler_cancellation(aiohttp_unused_port) -> None: timeout_event = asyncio.Event() done_event = asyncio.Event() port = aiohttp_unused_port() + started = False async def on_request(_: web.Request) -> web.Response: - nonlocal done_event, timeout_event + nonlocal done_event, started, timeout_event + started = True await asyncio.wait_for(timeout_event.wait(), timeout=5) done_event.set() return web.Response() @@ -238,7 +240,7 @@ async def on_request(_: web.Request) -> web.Response: try: async with client.ClientSession( - timeout=client.ClientTimeout(total=0.1) + timeout=client.ClientTimeout(total=0.2) ) as sess: with pytest.raises(asyncio.TimeoutError): await sess.get(f"http://localhost:{port}/") @@ -247,6 +249,7 @@ async def on_request(_: web.Request) -> web.Response: with suppress(asyncio.TimeoutError): await asyncio.wait_for(done_event.wait(), timeout=1) + assert started assert done_event.is_set() finally: await asyncio.gather(runner.shutdown(), site.stop()) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 4fbf5b02ecc..8ca8dcd7b99 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -93,13 +93,13 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. 
- r = await client.get(prefix) - assert r.status == status + async with await client.get(prefix) as r: + assert r.status == status - if data: - assert r.headers["Content-Type"] == "text/html; charset=utf-8" - read_ = await r.read() - assert read_ == data + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data async def test_follow_symlink( From 946523d6380bd79e13146557432f46f6f9bbd53f Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sat, 25 Nov 2023 23:30:52 +0000 Subject: [PATCH 018/144] Fix flaky websocket test (#7902) (#7904) (cherry picked from commit 28d0b06c267335555f46569d0fd8803b74b1a5a4) --- tests/test_client_ws_functional.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 87e4162c04f..6270675276e 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -5,6 +5,7 @@ import aiohttp from aiohttp import hdrs, web +from aiohttp.http import WSCloseCode if sys.version_info >= (3, 11): import asyncio as async_timeout @@ -581,12 +582,12 @@ async def handler(request): app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", heartbeat=0.05) - - await resp.receive() - await resp.receive() + resp = await client.ws_connect("/", heartbeat=0.1) + # Connection should be closed roughly after 1.5x heartbeat. + await asyncio.sleep(0.2) assert ping_received + assert resp.close_code is WSCloseCode.ABNORMAL_CLOSURE async def test_send_recv_compress(aiohttp_client) -> None: From dd175b6b89564dc74fba0692a8a5f9a9b38e528a Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 26 Nov 2023 15:22:31 +0000 Subject: [PATCH 019/144] Fix regression with connection upgrade (#7879) (#7908) Fixes #7867. 
(cherry picked from commit 48b15583305e692ce997ec6f5a6a2f88f23ace71) --- CHANGES/7879.bugfix | 1 + aiohttp/client_reqrep.py | 19 ++++++++----------- aiohttp/connector.py | 4 ++++ tests/test_client_functional.py | 19 +++++++++++++++++++ 4 files changed, 32 insertions(+), 11 deletions(-) create mode 100644 CHANGES/7879.bugfix diff --git a/CHANGES/7879.bugfix b/CHANGES/7879.bugfix new file mode 100644 index 00000000000..08baf85be42 --- /dev/null +++ b/CHANGES/7879.bugfix @@ -0,0 +1 @@ +Fixed a regression where connection may get closed during upgrade. -- by :user:`Dreamsorcerer` diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 0ab84743658..1d946aea320 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -1006,19 +1006,14 @@ def _response_eof(self) -> None: if self._closed: return - if self._connection is not None: - # websocket, protocol could be None because - # connection could be detached - if ( - self._connection.protocol is not None - and self._connection.protocol.upgraded - ): - return - - self._release_connection() + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: + return self._closed = True self._cleanup_writer() + self._release_connection() @property def closed(self) -> bool: @@ -1113,7 +1108,9 @@ async def read(self) -> bytes: elif self._released: # Response explicitly released raise ClientConnectionError("Connection closed") - await self._wait_released() # Underlying connection released + protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released return self._body # type: ignore[no-any-return] def get_encoding(self) -> str: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d85679f8bca..61c26430860 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -127,6 +127,10 
@@ def __del__(self, _warnings: Any = warnings) -> None: context["source_traceback"] = self._source_traceback self._loop.call_exception_handler(context) + def __bool__(self) -> Literal[True]: + """Force subclasses to not be falsy, to make checks simpler.""" + return True + @property def loop(self) -> asyncio.AbstractEventLoop: warnings.warn( diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 6698ac6ef52..8a9a4e184be 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -173,6 +173,25 @@ async def handler(request): assert 1 == len(client._session.connector._conns) +async def test_upgrade_connection_not_released_after_read(aiohttp_client) -> None: + async def handler(request: web.Request) -> web.Response: + body = await request.read() + assert b"" == body + return web.Response( + status=101, headers={"Connection": "Upgrade", "Upgrade": "tcp"} + ) + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + + resp = await client.get("/") + await resp.read() + assert resp.connection is not None + assert not resp.closed + + async def test_keepalive_server_force_close_connection(aiohttp_client) -> None: async def handler(request): body = await request.read() From 9dbd273093d6af6f5e1481816b05a7192860b440 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 26 Nov 2023 16:00:04 +0000 Subject: [PATCH 020/144] [PR #7673/aa7d1a8f backport][3.9] Document release process (#7909) **This is a backport of PR #7673 as merged into master (aa7d1a8fcad4ac4f1f0eae577f4c1947ebc1acf3).** Co-authored-by: Sam Bull --- docs/contributing-admins.rst | 57 ++++++++++++++++++++++++++++++++++++ docs/contributing.rst | 2 ++ docs/spelling_wordlist.txt | 3 +- 3 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 docs/contributing-admins.rst diff --git a/docs/contributing-admins.rst b/docs/contributing-admins.rst 
new file mode 100644 index 00000000000..488953c6cc5 --- /dev/null +++ b/docs/contributing-admins.rst @@ -0,0 +1,57 @@ +:orphan: + +Instructions for aiohttp admins +=============================== + +This page is intended to document certain processes for admins of the aiohttp repository. +For regular contributors, return to :doc:`contributing`. + +.. contents:: + :local: + +Creating a new release +---------------------- + +.. note:: The example commands assume that ``origin`` refers to the ``aio-libs`` repository. + +To create a new release: + +#. Start on the branch for the release you are planning (e.g. ``3.8`` for v3.8.6): ``git checkout 3.8 && git pull`` +#. Update the version number in ``__init__.py``. +#. Run ``towncrier``. +#. Check and cleanup the changes in ``CHANGES.rst``. +#. Checkout a new branch: e.g. ``git checkout -b release/v3.8.6`` +#. Commit and create a PR. Once PR is merged, continue. +#. Go back to the release branch: e.g. ``git checkout 3.8 && git pull`` +#. Add a tag: e.g. ``git tag -a v3.8.6 -m 'Release 3.8.6'`` +#. Push the tag: e.g. ``git push origin v3.8.6`` +#. Monitor CI to ensure release process completes without errors. + +Once released, we need to complete some cleanup steps (no further steps are needed for +non-stable releases though). If doing a patch release, we need to do the below steps twice, +first merge into the newer release branch (e.g. 3.8 into 3.9) and then to master +(e.g. 3.9 into master). If a new minor release, then just merge to master. + +#. Switch to target branch: e.g. ``git checkout 3.9 && git pull`` +#. Start a merge: e.g. ``git merge 3.8 --no-commit --no-ff --gpg-sign`` +#. Carefully review the changes and revert anything that should not be included (most + things outside the changelog). +#. To ensure change fragments are cleaned up properly, run: ``python tools/cleanup_changes.py`` +#. Commit the merge (must be a normal merge commit, not squashed). +#. 
Push the branch directly to Github (because a PR would get squashed). When pushing, + you may get a rejected message. Follow these steps to resolve: + + #. Checkout to a new branch and push: e.g. ``git checkout -b do-not-merge && git push`` + #. Open a *draft* PR with a title of 'DO NOT MERGE'. + #. Once the CI has completed on that branch, you should be able to switch back and push + the target branch (as tests have passed on the merge commit now). + #. This should automatically consider the PR merged and delete the temporary branch. + +Back on the original release branch, append ``.dev0`` to the version number in ``__init__.py``. + +If doing a minor release: + +#. Create a new release branch for future features to go to: e.g. ``git checkout -b 3.10 3.9 && git push`` +#. Update ``target-branch`` for Dependabot to reference the new branch name in ``.github/dependabot.yml``. +#. Delete the older backport label (e.g. backport-3.8): https://github.com/aio-libs/aiohttp/labels +#. Add a new backport label (e.g. backport-3.10). 
diff --git a/docs/contributing.rst b/docs/contributing.rst index 1b826eb0a9f..5263f4a3f47 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -3,6 +3,8 @@ Contributing ============ +(:doc:`contributing-admins`) + Instructions for contributors ----------------------------- diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 84cb5cd8131..1523ccd2a65 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -53,6 +53,7 @@ canonicalize cchardet cChardet ceil +changelog Changelog chardet Chardet @@ -88,7 +89,7 @@ Cythonize cythonized de deduplicate -# de-facto: +Dependabot deprecations DER dev From 6333c026422c6b0fe57ff63cde4104e2d00f47f4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 26 Nov 2023 16:33:53 +0000 Subject: [PATCH 021/144] Release v3.9.1 (#7911) --- CHANGES.rst | 31 +++++++++++++++++++++++++++++++ CHANGES/7848.bugfix | 1 - CHANGES/7865.bugfix | 1 - CHANGES/7869.bugfix | 1 - CHANGES/7879.bugfix | 1 - CHANGES/7895.bugfix | 1 - aiohttp/__init__.py | 2 +- 7 files changed, 32 insertions(+), 6 deletions(-) delete mode 100644 CHANGES/7848.bugfix delete mode 100644 CHANGES/7865.bugfix delete mode 100644 CHANGES/7869.bugfix delete mode 100644 CHANGES/7879.bugfix delete mode 100644 CHANGES/7895.bugfix diff --git a/CHANGES.rst b/CHANGES.rst index fcfd111b8a4..8c2a2707408 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,37 @@ .. towncrier release notes start +3.9.1 (2023-11-26) +================== + +Bugfixes +-------- + +- Fixed importing aiohttp under PyPy on Windows. + + `#7848 `_ + +- Fixed async concurrency safety in websocket compressor. + + `#7865 `_ + +- Fixed ``ClientResponse.close()`` releasing the connection instead of closing. + + `#7869 `_ + +- Fixed a regression where connection may get closed during upgrade. -- by :user:`Dreamsorcerer` + + `#7879 `_ + +- Fixed messages being reported as upgraded without an Upgrade header in Python parser. 
-- by :user:`Dreamsorcerer` + + `#7895 `_ + + + +---- + + 3.9.0 (2023-11-18) ================== diff --git a/CHANGES/7848.bugfix b/CHANGES/7848.bugfix deleted file mode 100644 index 13a29e2a226..00000000000 --- a/CHANGES/7848.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix importing aiohttp under PyPy 3.8 and 3.9 on Windows. diff --git a/CHANGES/7865.bugfix b/CHANGES/7865.bugfix deleted file mode 100644 index 9a46e124486..00000000000 --- a/CHANGES/7865.bugfix +++ /dev/null @@ -1 +0,0 @@ -Restore async concurrency safety to websocket compressor diff --git a/CHANGES/7869.bugfix b/CHANGES/7869.bugfix deleted file mode 100644 index 23282fc3bb4..00000000000 --- a/CHANGES/7869.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix ClientResponse.close releasing the connection instead of closing diff --git a/CHANGES/7879.bugfix b/CHANGES/7879.bugfix deleted file mode 100644 index 08baf85be42..00000000000 --- a/CHANGES/7879.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed a regression where connection may get closed during upgrade. -- by :user:`Dreamsorcerer` diff --git a/CHANGES/7895.bugfix b/CHANGES/7895.bugfix deleted file mode 100644 index 557df294d71..00000000000 --- a/CHANGES/7895.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fixed messages being reported as upgraded without an Upgrade header in Python parser. 
-- by :user:`Dreamsorcerer` diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index f257f2818d4..32f85acb028 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.0.dev0" +__version__ = "3.9.1" from typing import TYPE_CHECKING, Tuple From 0bc17e6daf508876838e4cf4d1b999e53edfdd1d Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 26 Nov 2023 18:40:30 +0000 Subject: [PATCH 022/144] Bump version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 32f85acb028..e66c9eba427 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.1" +__version__ = "3.9.1.dev0" from typing import TYPE_CHECKING, Tuple From ee8f7bfead9cf54e29520048a4f05cc56c27561e Mon Sep 17 00:00:00 2001 From: Alex <52292902+alexrudd2@users.noreply.github.com> Date: Wed, 29 Nov 2023 16:45:51 -0600 Subject: [PATCH 023/144] [PR #7808/213d1b2 backport][3.9] Restore requirements-txt-fixer in pre-commit (#7920) Backport of https://github.com/aio-libs/aiohttp/pull/7808 cherry picked from commit 213d1b22d42ce8efc54d7858b490e920fcdb4f0a --- .pre-commit-config.yaml | 3 +-- Makefile | 5 +---- requirements/runtime-deps.in | 12 ++++++------ requirements/sync-direct-runtime-deps.py | 16 ++++++++++++++++ requirements/test.in | 2 +- setup.cfg | 6 +++--- 6 files changed, 28 insertions(+), 16 deletions(-) create mode 100755 requirements/sync-direct-runtime-deps.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ffa16b6fb36..587c46e991d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -45,8 +45,7 @@ repos: exclude: >- ^docs/[^/]*\.svg$ - id: requirements-txt-fixer - exclude: >- - ^requirements/.*\.txt$ + files: requirements/.*\.in$ - id: trailing-whitespace - id: file-contents-sorter args: ['--ignore-case'] diff --git a/Makefile b/Makefile index cdeb0ad0ed9..e3ec98c7ce8 100644 --- a/Makefile +++ b/Makefile @@ -182,7 
+182,4 @@ install-dev: .develop .PHONY: sync-direct-runtime-deps sync-direct-runtime-deps: @echo Updating 'requirements/runtime-deps.in' from 'setup.cfg'... >&2 - @echo '# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`' > requirements/runtime-deps.in - @echo >> requirements/runtime-deps.in - @python -c 'from configparser import ConfigParser; from itertools import chain; from pathlib import Path; cfg = ConfigParser(); cfg.read_string(Path("setup.cfg").read_text()); print("\n".join(line.strip() for line in chain(cfg["options"].get("install_requires").splitlines(), "\n".join(cfg["options.extras_require"].values()).splitlines()) if line.strip()))' \ - >> requirements/runtime-deps.in + @python requirements/sync-direct-runtime-deps.py diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 2bfb21ecd18..b2df16f1680 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,11 +1,11 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -attrs >= 17.3.0 -multidict >=4.5, < 7.0 -async-timeout >= 4.0, < 5.0 ; python_version < "3.11" -yarl >= 1.0, < 2.0 -frozenlist >= 1.1.1 -aiosignal >= 1.1.2 aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiosignal >= 1.1.2 +async-timeout >= 4.0, < 5.0 ; python_version < "3.11" +attrs >= 17.3.0 Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' +frozenlist >= 1.1.1 +multidict >=4.5, < 7.0 +yarl >= 1.0, < 2.0 diff --git a/requirements/sync-direct-runtime-deps.py b/requirements/sync-direct-runtime-deps.py new file mode 100755 index 00000000000..adc28bdd287 --- /dev/null +++ b/requirements/sync-direct-runtime-deps.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +"""Sync direct runtime dependencies from setup.cfg to runtime-deps.in.""" + +from configparser import ConfigParser +from pathlib import Path + +cfg = ConfigParser() +cfg.read(Path("setup.cfg")) +reqs = cfg["options"]["install_requires"] + 
cfg.items("options.extras_require")[0][1] +reqs = sorted(reqs.split("\n"), key=str.casefold) +reqs.remove("") + +with open(Path("requirements", "runtime-deps.in"), "w") as outfile: + header = "# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`\n\n" + outfile.write(header) + outfile.write("\n".join(reqs) + "\n") diff --git a/requirements/test.in b/requirements/test.in index 417d45959be..5c1edf5dabe 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -1,5 +1,5 @@ --r base.in -c broken-projects.in +-r base.in coverage mypy; implementation_name == "cpython" diff --git a/setup.cfg b/setup.cfg index 8026c34cbab..c0515be8eeb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -47,12 +47,12 @@ zip_safe = False include_package_data = True install_requires = + aiosignal >= 1.1.2 attrs >= 17.3.0 - multidict >=4.5, < 7.0 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" - yarl >= 1.0, < 2.0 frozenlist >= 1.1.1 - aiosignal >= 1.1.2 + multidict >=4.5, < 7.0 + yarl >= 1.0, < 2.0 [options.exclude_package_data] * = From d548b59b577fd6ec1f99fc6b1a2a0a69666e7606 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 16:19:56 +0000 Subject: [PATCH 024/144] [PR #7930/895afa83 backport][3.9] Use .coveragerc for TYPE_CHECKING conditions (#7931) **This is a backport of PR #7930 as merged into master (895afa83027cb88b5a99d521cff48b549d325990).** Co-authored-by: Sam Bull --- .coveragerc | 4 ++++ aiohttp/__init__.py | 2 +- aiohttp/abc.py | 4 ++-- aiohttp/client_exceptions.py | 2 +- aiohttp/client_reqrep.py | 2 +- aiohttp/connector.py | 2 +- aiohttp/multipart.py | 2 +- aiohttp/payload.py | 4 ++-- aiohttp/test_utils.py | 2 +- aiohttp/tracing.py | 2 +- aiohttp/typedefs.py | 2 +- aiohttp/web_app.py | 4 ++-- aiohttp/web_fileresponse.py | 2 +- aiohttp/web_middlewares.py | 2 +- aiohttp/web_protocol.py | 2 +- aiohttp/web_request.py | 2 +- aiohttp/web_response.py | 2 +- aiohttp/web_routedef.py | 2 +- 
aiohttp/web_urldispatcher.py | 2 +- 19 files changed, 25 insertions(+), 21 deletions(-) diff --git a/.coveragerc b/.coveragerc index 61cb5ad822d..0b5d5bf0ad4 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,3 +2,7 @@ branch = True source = aiohttp, tests omit = site-packages + +[report] +exclude_also = + if TYPE_CHECKING diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index e66c9eba427..c0fd9817546 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -104,7 +104,7 @@ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: # At runtime these are lazy-loaded at the bottom of the file. from .worker import ( GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ceb4490019a..ee838998997 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -22,7 +22,7 @@ from .helpers import get_running_loop from .typedefs import LooseCookies -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application from .web_exceptions import HTTPException from .web_request import BaseRequest, Request @@ -131,7 +131,7 @@ async def close(self) -> None: """Release resolver""" -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: IterableBase = Iterable[Morsel[str]] else: IterableBase = Iterable diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 588ffbf6ec6..9aae12a84e8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -15,7 +15,7 @@ ssl = SSLContext = None # type: ignore[assignment] -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo else: RequestInfo = ClientResponse = ConnectionKey = None diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 1d946aea320..4ae0ecbcdfb 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -81,7 +81,7 @@ __all__ = 
("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import ClientSession from .connector import Connection from .tracing import Trace diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 61c26430860..73f58b1a451 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -63,7 +63,7 @@ __all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import ClientTimeout from .client_reqrep import ConnectionKey from .tracing import Trace diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 3a8793398d5..602a6b67457 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -59,7 +59,7 @@ ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client_reqrep import ClientResponse diff --git a/aiohttp/payload.py b/aiohttp/payload.py index ba856693eed..6593b05c6f7 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -54,7 +54,7 @@ TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from typing import List @@ -401,7 +401,7 @@ def __init__( ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from typing import AsyncIterable, AsyncIterator _AsyncIterator = AsyncIterator[bytes] diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 2a026fe704f..b5821a7fb84 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -50,7 +50,7 @@ ) from .web_protocol import _RequestHandler -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from ssl import SSLContext else: SSLContext = None diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 70e2a62ec1d..62847a0bf7c 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -8,7 +8,7 @@ from .client_reqrep import ClientResponse -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .client import ClientSession _ParamT_contra = 
TypeVar("_ParamT_contra", contravariant=True) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index 57d95b384f2..5e963e1a10e 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -17,7 +17,7 @@ DEFAULT_JSON_ENCODER = json.dumps DEFAULT_JSON_DECODER = json.loads -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _CIMultiDict = CIMultiDict[str] _CIMultiDictProxy = CIMultiDictProxy[str] _MultiDict = MultiDict[str] diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 6e822b80225..91bf5fdac61 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -60,7 +60,7 @@ __all__ = ("Application", "CleanupError") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _AppSignal = Signal[Callable[["Application"], Awaitable[None]]] _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]] _Middlewares = FrozenList[Middleware] @@ -561,7 +561,7 @@ def exceptions(self) -> List[BaseException]: return cast(List[BaseException], self.args[1]) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]] else: _CleanupContextBase = FrozenList diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0f39c70dcb8..eb7a6a31d39 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -31,7 +31,7 @@ __all__ = ("FileResponse",) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import BaseRequest diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index cb24eec9107..5da1533c0df 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -12,7 +12,7 @@ "normalize_path_middleware", ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application _Func = TypeVar("_Func") diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index d0ed0591c17..ec5856a0a22 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -44,7 +44,7 @@ 
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_server import Server diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a7e32ca6c79..61fc831b032 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -66,7 +66,7 @@ __all__ = ("BaseRequest", "FileField", "Request") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application from .web_protocol import RequestHandler from .web_urldispatcher import UrlMappingMatchInfo diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e089c60ee4c..b6a4ba9b31e 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -44,7 +44,7 @@ __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import BaseRequest BaseClass = MutableMapping[str, Any] diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index a1eb0a76549..d79cd32a14a 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -20,7 +20,7 @@ from .abc import AbstractView from .typedefs import Handler, PathLike -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_request import Request from .web_response import StreamResponse from .web_urldispatcher import AbstractRoute, UrlDispatcher diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index ddb6ede0dd1..0334c8c9b1e 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -69,7 +69,7 @@ ) -if TYPE_CHECKING: # pragma: no cover +if TYPE_CHECKING: from .web_app import Application BaseDict = Dict[str, str] From 32651f8e8b4af0159db71be16d0fd7f148c195de Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 3 Dec 2023 06:46:11 +0000 Subject: [PATCH 025/144] [PR #7927/da2e349a backport][3.9] Pin proxy.py to 2.4.4rc4 for testing (#7934) 
Co-authored-by: J. Nick Koston --- requirements/dev.txt | 2 +- requirements/test.in | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/dev.txt b/requirements/dev.txt index 9639c875dc1..915118c47db 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -129,7 +129,7 @@ pluggy==1.2.0 # via pytest pre-commit==3.5.0 # via -r requirements/lint.in -proxy-py==2.4.3 +proxy-py==2.4.4rc4 # via -r requirements/test.in pycares==4.3.0 # via aiodns diff --git a/requirements/test.in b/requirements/test.in index 5c1edf5dabe..3a82a00818a 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -3,7 +3,7 @@ coverage mypy; implementation_name == "cpython" -proxy.py +proxy.py >= 2.4.4rc4 pytest pytest-cov pytest-mock From 5ea4dbd872c5148b0b3d781a80c65f8fdcbcb243 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 4 Dec 2023 00:30:11 +0000 Subject: [PATCH 026/144] Revert time-machine back to freezegun (#7937) (#7939) (cherry picked from commit 43f92fae09bcc9692ee96ac1413eda884afa2f63) --- requirements/constraints.txt | 20 +++++--------------- requirements/dev.txt | 16 +++------------- requirements/test.in | 2 +- requirements/test.txt | 6 +++--- tests/test_cookiejar.py | 27 +++++---------------------- 5 files changed, 17 insertions(+), 54 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 37367af4773..adba72bb204 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -70,6 +70,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.3.2 # via virtualenv +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -89,11 +91,7 @@ idna==3.3 # yarl imagesize==1.3.0 # via sphinx -importlib-metadata==6.8.0 - # via sphinx -importlib-resources==6.1.0 - # via towncrier -incremental==21.3.0 +incremental==22.10.0 # via towncrier iniconfig==1.1.1 # via pytest @@ -167,11 +165,9 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r 
requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in -pytz==2021.3 - # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 @@ -217,9 +213,7 @@ sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -time-machine==2.13.0 ; implementation_name == "cpython" - # via -r requirements/test.in -tomli==1.2.2 +tomli==2.0.1 # via # build # cherry-picker @@ -267,10 +261,6 @@ wheel==0.37.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in -zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/dev.txt b/requirements/dev.txt index 915118c47db..3d5926c12bd 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -66,6 +66,8 @@ exceptiongroup==1.1.2 # via pytest filelock==3.12.2 # via virtualenv +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -85,10 +87,6 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 - # via sphinx -importlib-resources==6.1.0 - # via towncrier incremental==22.10.0 # via towncrier iniconfig==2.0.0 @@ -158,11 +156,9 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in -pytz==2023.3.post1 - # via babel pyyaml==6.0.1 # via pre-commit re-assert==1.1.0 @@ -203,8 +199,6 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in -time-machine==2.13.0 ; implementation_name == "cpython" - # via -r requirements/test.in tomli==2.0.1 # via # build @@ -254,10 +248,6 @@ wheel==0.41.0 # via pip-tools yarl==1.9.3 # via -r requirements/runtime-deps.in 
-zipp==3.17.0 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: pip==23.2.1 diff --git a/requirements/test.in b/requirements/test.in index 3a82a00818a..686cd6dbf2e 100644 --- a/requirements/test.in +++ b/requirements/test.in @@ -2,6 +2,7 @@ -r base.in coverage +freezegun mypy; implementation_name == "cpython" proxy.py >= 2.4.4rc4 pytest @@ -10,6 +11,5 @@ pytest-mock python-on-whales re-assert setuptools-git -time-machine; implementation_name == "cpython" trustme; platform_machine != "i686" # no 32-bit wheels wait-for-it diff --git a/requirements/test.txt b/requirements/test.txt index 6451eb45cfc..57c00fc2439 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -36,6 +36,8 @@ cryptography==41.0.2 # via trustme exceptiongroup==1.1.2 # via pytest +freezegun==1.3.0 + # via -r requirements/test.in frozenlist==1.4.0 # via # -r requirements/runtime-deps.in @@ -83,7 +85,7 @@ pytest-cov==4.1.0 pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 - # via time-machine + # via freezegun python-on-whales==0.67.0 # via -r requirements/test.in re-assert==1.1.0 @@ -96,8 +98,6 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -time-machine==2.13.0 ; implementation_name == "cpython" - # via -r requirements/test.in tomli==2.0.1 # via # coverage diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 522dcc8e1b6..cffca3a4b59 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -3,22 +3,17 @@ import itertools import os import pickle -import sys import tempfile import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from unittest import mock import pytest +from freezegun import freeze_time from yarl import URL from aiohttp import CookieJar, DummyCookieJar -try: - from time_machine import travel -except ImportError: - travel = None # type: ignore[assignment] - def dump_cookiejar() -> bytes: 
# pragma: no cover """Create pickled data for test_pickle_format().""" @@ -412,10 +407,10 @@ def timed_request(self, url, update_time, send_time): elif isinstance(send_time, float): send_time = datetime.datetime.fromtimestamp(send_time) - with travel(update_time, tick=False): + with freeze_time(update_time): self.jar.update_cookies(self.cookies_to_send) - with travel(send_time, tick=False): + with freeze_time(send_time): cookies_sent = self.jar.filter_cookies(URL(url)) self.jar.clear() @@ -607,10 +602,6 @@ def test_path_value(self) -> None: self.assertEqual(cookies_received["path-cookie"]["path"], "/somepath") self.assertEqual(cookies_received["wrong-path-cookie"]["path"], "/") - @unittest.skipIf( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", - ) def test_expires(self) -> None: ts_before = datetime.datetime( 1975, 1, 1, tzinfo=datetime.timezone.utc @@ -632,10 +623,6 @@ def test_expires(self) -> None: self.assertEqual(set(cookies_sent.keys()), {"shared-cookie"}) - @unittest.skipIf( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", - ) def test_max_age(self) -> None: cookies_sent = self.timed_request("http://maxagetest.com/", 1000, 1000) @@ -783,10 +770,6 @@ async def test_cookie_jar_clear_all(): assert len(sut) == 0 -@pytest.mark.skipif( - sys.implementation.name != "cpython", - reason="time_machine leverages CPython specific pointers https://github.com/adamchainz/time-machine/issues/305", -) async def test_cookie_jar_clear_expired(): sut = CookieJar() @@ -795,11 +778,11 @@ async def test_cookie_jar_clear_expired(): cookie["foo"] = "bar" cookie["foo"]["expires"] = "Tue, 1 Jan 1990 12:00:00 GMT" - with travel("1980-01-01", tick=False): + with freeze_time("1980-01-01"): sut.update_cookies(cookie) sut.clear(lambda x: False) - with travel("1980-01-01", 
tick=False): + with freeze_time("1980-01-01"): assert len(sut) == 0 From f03c4a8d8079ad4eabbde61db60075f9259eb48e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 11:10:59 +0000 Subject: [PATCH 027/144] Bump sigstore/gh-action-sigstore-python from 2.1.0 to 2.1.1 (#7966) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [sigstore/gh-action-sigstore-python](https://github.com/sigstore/gh-action-sigstore-python) from 2.1.0 to 2.1.1.
Release notes

Sourced from sigstore/gh-action-sigstore-python's releases.

v2.1.1

What's Changed

Full Changelog: https://github.com/sigstore/gh-action-sigstore-python/compare/v2.1.0...v2.1.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sigstore/gh-action-sigstore-python&package-manager=github_actions&previous-version=2.1.0&new-version=2.1.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 15a79d52ab5..bdab50c170b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -422,7 +422,7 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v2.1.0 + uses: sigstore/gh-action-sigstore-python@v2.1.1 with: inputs: >- ./dist/*.tar.gz From 5a906f98d5053fe80a6cf960486ee480545cf7fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 11:17:25 +0000 Subject: [PATCH 028/144] Bump github/codeql-action from 2 to 3 (#7965) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 2 to 3.
Release notes

Sourced from github/codeql-action's releases.

CodeQL Bundle v2.15.4

Bundles CodeQL CLI v2.15.4

Includes the following CodeQL language packs from github/codeql@codeql-cli/v2.15.4:

CodeQL Bundle

Bundles CodeQL CLI v2.15.3

Includes the following CodeQL language packs from github/codeql@codeql-cli/v2.15.3:

CodeQL Bundle

Bundles CodeQL CLI v2.15.2

Includes the following CodeQL language packs from github/codeql@codeql-cli/v2.15.2:

... (truncated)

Changelog

Sourced from github/codeql-action's changelog.

Commits
  • 3a9f6a8 update javascript files
  • cc4fead update version in various hardcoded locations
  • 183559c Merge branch 'main' into update-bundle/codeql-bundle-v2.15.4
  • 5b52b36 reintroduce PR check that confirm action can be still be compiled on node16
  • 5b19bef change to node20 for all actions
  • f2d0c2e upgrade node type definitions
  • d651fbc change to node20 for all actions
  • 382a50a Merge pull request #2021 from github/mergeback/v2.22.9-to-main-c0d1daa7
  • 458b422 Update checked-in dependencies
  • 5e0f9db Update changelog and version after v2.22.9
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github/codeql-action&package-manager=github_actions&previous-version=2&new-version=3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 9e2140430a3..601d45a35ad 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -29,17 +29,17 @@ jobs: uses: actions/checkout@v4 - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} config-file: ./.github/codeql.yml queries: +security-and-quality - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 if: ${{ matrix.language == 'python' || matrix.language == 'javascript' }} - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: category: "/language:${{ matrix.language }}" From e3b7cbef12edd33221239ec945ff78be267fb210 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 15:57:17 +0000 Subject: [PATCH 029/144] [PR #7961/5e44ba46 backport][3.9] Add reminder to use 'fixes' in PRs (#7968) **This is a backport of PR #7961 as merged into master (5e44ba465c2168c6e1842df441828ec5759093e1).** Co-authored-by: Sam Bull --- .github/PULL_REQUEST_TEMPLATE.md | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 237c61a659f..3ac54a518b5 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -11,6 +11,7 @@ ## Related issue number + ## Checklist From 46f6a90becf96f43db8b3b9acc627eb1b72bb216 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 18 Dec 2023 14:35:04 -1000 Subject: [PATCH 030/144] Fix websocket connection leak (#7978) (#7979) --- CHANGES/7978.bugfix | 1 + aiohttp/web_ws.py | 94 +++++++++++++++++++++---------------- docs/web_reference.rst | 12 ++++- tests/test_web_websocket.py | 27 +++++++++++ 4 files changed, 93 insertions(+), 41 deletions(-) create mode 100644 CHANGES/7978.bugfix diff --git a/CHANGES/7978.bugfix b/CHANGES/7978.bugfix new file mode 100644 index 00000000000..3c7dc096ca7 --- /dev/null +++ b/CHANGES/7978.bugfix @@ -0,0 +1 @@ +Fix websocket connection leak diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 4e57bca4f69..783377716f5 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -140,9 +140,8 @@ def _send_heartbeat(self) -> None: def _pong_not_received(self) -> None: if self._req is not None and self._req.transport is not None: self._closed = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) self._exception = asyncio.TimeoutError() - self._req.transport.close() async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: # make pre-check to don't hide it by do_handshake() exceptions @@ -360,7 +359,10 @@ async def write_eof(self) -> None: # type: ignore[override] await self.close() self._eof_sent = True - async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool: + async def close( + self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True + ) -> bool: + """Close websocket connection.""" if self._writer is None: raise RuntimeError("Call .prepare() first") @@ -374,46 +376,53 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo reader.feed_data(WS_CLOSING_MESSAGE, 0) await self._waiting - if not self._closed: - self._closed = True - try: - await self._writer.close(code, message) - writer = self._payload_writer - assert writer is not None - await writer.drain() - except 
(asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - return True + if self._closed: + return False - if self._closing: - return True + self._closed = True + try: + await self._writer.close(code, message) + writer = self._payload_writer + assert writer is not None + if drain: + await writer.drain() + except (asyncio.CancelledError, asyncio.TimeoutError): + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True - reader = self._reader - assert reader is not None - try: - async with async_timeout.timeout(self._timeout): - msg = await reader.read() - except asyncio.CancelledError: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - raise - except Exception as exc: - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = exc - return True + if self._closing: + return True - if msg.type == WSMsgType.CLOSE: - self._close_code = msg.data - return True + reader = self._reader + assert reader is not None + try: + async with async_timeout.timeout(self._timeout): + msg = await reader.read() + except asyncio.CancelledError: + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + raise + except Exception as exc: + self._exception = exc + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + return True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE - self._exception = asyncio.TimeoutError() + if msg.type == WSMsgType.CLOSE: + self._set_code_close_transport(msg.data) return True - else: - return False + + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + self._exception = asyncio.TimeoutError() + return True + + def _set_code_close_transport(self, code: WSCloseCode) -> None: + """Set the close code and close the transport.""" + 
self._close_code = code + if self._req is not None and self._req.transport is not None: + self._req.transport.close() async def receive(self, timeout: Optional[float] = None) -> WSMessage: if self._reader is None: @@ -444,7 +453,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: set_result(waiter, True) self._waiting = None except (asyncio.CancelledError, asyncio.TimeoutError): - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) raise except EofStream: self._close_code = WSCloseCode.OK @@ -464,8 +473,13 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: if msg.type == WSMsgType.CLOSE: self._closing = True self._close_code = msg.data + # Could be closed while awaiting reader. if not self._closed and self._autoclose: - await self.close() + # The client is likely going to close the + # connection out from under us so we do not + # want to drain any pending writes as it will + # likely result writing to a broken pipe. + await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: self._closing = True elif msg.type == WSMsgType.PING and self._autoping: diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 1351e76d25d..1d92678a083 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -970,6 +970,14 @@ and :ref:`aiohttp-web-signals` handlers:: .. versionadded:: 3.3 + :param bool autoclose: Close connection when the client sends + a :const:`~aiohttp.WSMsgType.CLOSE` message, + ``True`` by default. If set to ``False``, + the connection is not closed and the + caller is responsible for calling + ``request.transport.close()`` to avoid + leaking resources. + The class supports ``async for`` statement for iterating over incoming messages:: @@ -1146,7 +1154,7 @@ and :ref:`aiohttp-web-signals` handlers:: The method is converted into :term:`coroutine`, *compress* parameter added. - .. method:: close(*, code=WSCloseCode.OK, message=b'') + .. 
method:: close(*, code=WSCloseCode.OK, message=b'', drain=True) :async: A :ref:`coroutine` that initiates closing @@ -1160,6 +1168,8 @@ and :ref:`aiohttp-web-signals` handlers:: :class:`str` (converted to *UTF-8* encoded bytes) or :class:`bytes`. + :param bool drain: drain outgoing buffer before closing connection. + :raise RuntimeError: if connection is not started .. method:: receive(timeout=None) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 0ec1b5a7d84..d0aca0c019a 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -1,4 +1,6 @@ import asyncio +import time +from typing import Any from unittest import mock import aiosignal @@ -165,6 +167,20 @@ async def test_write_non_prepared() -> None: await ws.write(b"data") +async def test_heartbeat_timeout(make_request: Any) -> None: + """Verify the transport is closed when the heartbeat timeout is reached.""" + loop = asyncio.get_running_loop() + future = loop.create_future() + req = make_request("GET", "/") + lowest_time = time.get_clock_info("monotonic").resolution + req._protocol._timeout_ceil_threshold = lowest_time + ws = WebSocketResponse(heartbeat=lowest_time, timeout=lowest_time) + await ws.prepare(req) + ws._req.transport.close.side_effect = lambda: future.set_result(None) + await future + assert ws.closed + + def test_websocket_ready() -> None: websocket_ready = WebSocketReady(True, "chat") assert websocket_ready.ok is True @@ -233,6 +249,7 @@ async def test_send_str_closed(make_request) -> None: await ws.prepare(req) ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() + assert len(ws._req.transport.close.mock_calls) == 1 with pytest.raises(ConnectionError): await ws.send_str("string") @@ -289,6 +306,8 @@ async def test_close_idempotent(make_request) -> None: ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) assert await ws.close(code=1, message="message1") assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + assert not (await 
ws.close(code=2, message="message2")) @@ -322,12 +341,15 @@ async def test_write_eof_idempotent(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 + ws._reader.feed_data(WS_CLOSED_MESSAGE, 0) await ws.close() await ws.write_eof() await ws.write_eof() await ws.write_eof() + assert len(ws._req.transport.close.mock_calls) == 1 async def test_receive_eofstream_in_reader(make_request, loop) -> None: @@ -353,6 +375,7 @@ async def test_receive_timeouterror(make_request, loop) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 ws._reader = mock.Mock() res = loop.create_future() @@ -362,6 +385,8 @@ async def test_receive_timeouterror(make_request, loop) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() + assert len(ws._req.transport.close.mock_calls) == 1 + async def test_multiple_receive_on_close_connection(make_request) -> None: req = make_request("GET", "/") @@ -394,6 +419,7 @@ async def test_close_exc(make_request) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) + assert len(ws._req.transport.close.mock_calls) == 0 exc = ValueError() ws._writer = mock.Mock() @@ -401,6 +427,7 @@ async def test_close_exc(make_request) -> None: await ws.close() assert ws.closed assert ws.exception() is exc + assert len(ws._req.transport.close.mock_calls) == 1 ws._closed = False ws._writer.close.side_effect = asyncio.CancelledError() From fca5a7a0c8f3e713a4af2568fa94209f59f16fca Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 10 Jan 2024 14:56:40 -1000 Subject: [PATCH 031/144] [PR #8012/5f699bbb backport][3.9] Fix FileResponse doing blocking I/O in the event loop (#8015) Co-authored-by: J. 
Nick Koston --- CHANGES/8012.bugfix | 1 + aiohttp/web_fileresponse.py | 28 ++++++++++++++++++++-------- tests/test_web_sendfile.py | 8 ++++---- 3 files changed, 25 insertions(+), 12 deletions(-) create mode 100644 CHANGES/8012.bugfix diff --git a/CHANGES/8012.bugfix b/CHANGES/8012.bugfix new file mode 100644 index 00000000000..f5187075f3f --- /dev/null +++ b/CHANGES/8012.bugfix @@ -0,0 +1 @@ +Fix `web.FileResponse` doing blocking I/O in the event loop diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index eb7a6a31d39..c3b3814974e 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -124,19 +124,31 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + def _get_file_path_stat_and_gzip( + self, check_for_gzipped_file: bool + ) -> Tuple[pathlib.Path, os.stat_result, bool]: + """Return the file path, stat result, and gzip status. + + This method should be called from a thread executor + since it calls os.stat which may block. 
+ """ filepath = self._path - - gzip = False - if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""): + if check_for_gzipped_file: gzip_path = filepath.with_name(filepath.name + ".gz") + try: + return gzip_path, gzip_path.stat(), True + except OSError: + # Fall through and try the non-gzipped file + pass - if gzip_path.is_file(): - filepath = gzip_path - gzip = True + return filepath, filepath.stat(), False + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - st: os.stat_result = await loop.run_in_executor(None, filepath.stat) + check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + filepath, st, gzip = await loop.run_in_executor( + None, self._get_file_path_stat_and_gzip, check_for_gzipped_file + ) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" last_modified = st.st_mtime diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 0258def090f..2817e085a6f 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -12,7 +12,6 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = True gz_filepath.stat.return_value.st_size = 1024 gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -34,7 +33,8 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = True + gz_filepath.stat.return_value.st_size = 1024 + gz_filepath.stat.return_value.st_mtime_ns = 1603733507222449291 filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -56,7 +56,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: request = make_mocked_request("GET", "http://python.org/logo.png", headers={}) gz_filepath 
= mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = False + gz_filepath.stat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" @@ -80,7 +80,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: ) gz_filepath = mock.create_autospec(Path, spec_set=True) - gz_filepath.is_file.return_value = False + gz_filepath.stat.side_effect = OSError(2, "No such file or directory") filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" From f93e55c77d157c34d59c7b46e74cc15553156838 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 10 Jan 2024 15:51:34 -1000 Subject: [PATCH 032/144] Fix double compress when compression enabled and compressed file exists (#8014) (#8017) (cherry picked from commit 92655a5) --- CHANGES/8014.bugfix | 1 + aiohttp/web_fileresponse.py | 4 ++++ tests/test_web_sendfile_functional.py | 30 ++++++++++++++++++++++++++- 3 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8014.bugfix diff --git a/CHANGES/8014.bugfix b/CHANGES/8014.bugfix new file mode 100644 index 00000000000..681bb5966ae --- /dev/null +++ b/CHANGES/8014.bugfix @@ -0,0 +1 @@ +Fix double compress when compression enabled and compressed file exists diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index c3b3814974e..6496ffaf317 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -267,6 +267,10 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.headers[hdrs.CONTENT_ENCODING] = encoding if gzip: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + # Disable compression if we are already sending + # a compressed file since we don't want to double + # compress. 
+ self._compression = False self.etag = etag_value # type: ignore[assignment] self.last_modified = st.st_mtime # type: ignore[assignment] diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 3f4f13354ec..31f22892f66 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -250,7 +250,35 @@ async def handler(request): await client.close() -async def test_static_file_with_content_encoding(aiohttp_client, sender) -> None: +async def test_static_file_with_gziped_counter_part_enable_compression( + aiohttp_client: Any, sender: Any +): + """Test that enable_compression does not double compress when a .gz file is also present.""" + filepath = pathlib.Path(__file__).parent / "hello.txt" + + async def handler(request): + resp = sender(filepath) + resp.enable_compression() + return resp + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + resp = await client.get("/") + assert resp.status == 200 + body = await resp.read() + assert body == b"hello aiohttp\n" + assert resp.headers["Content-Type"] == "text/plain" + assert resp.headers.get("Content-Encoding") == "gzip" + resp.close() + await resp.release() + await client.close() + + +async def test_static_file_with_content_encoding( + aiohttp_client: Any, sender: Any +) -> None: filepath = pathlib.Path(__file__).parent / "hello.txt.gz" async def handler(request): From 64c4bc6a5620e138574a0c06783eee1f9a3e6203 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Jan 2024 10:33:41 +0000 Subject: [PATCH 033/144] Bump actions/cache from 3.3.2 to 3.3.3 (#8025) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 3.3.2 to 3.3.3.
Release notes

Sourced from actions/cache's releases.

v3.3.3

What's Changed

New Contributors

Full Changelog: https://github.com/actions/cache/compare/v3...v3.3.3

Changelog

Sourced from actions/cache's changelog.

3.3.2

  • Fixes bug with Azure SDK causing blob downloads to get stuck.

3.3.3

  • Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to getCacheVersion actions/toolkit#1378
  • Additional audit fixes of npm package(s)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=3.3.2&new-version=3.3.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index bdab50c170b..3ffc4da8d31 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v3.3.2 + uses: actions/cache@v3.3.3 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v3.3.2 + uses: actions/cache@v3.3.3 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} @@ -184,7 +184,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v3.3.2 + uses: actions/cache@v3.3.3 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 41968f892dfdd098d28d2f55561f5009461e661f Mon Sep 17 00:00:00 2001 From: igorvoltaic Date: Fri, 12 Jan 2024 22:15:51 +0300 Subject: [PATCH 034/144] Backport to 3.9: Add runtime type check for `ClientSession` `timeout` param (#8022) (#8027) --- CHANGES/8021.bugfix | 1 + CONTRIBUTORS.txt | 1 + aiohttp/client.py | 9 +++++++-- tests/test_client_session.py | 13 +++++++------ 4 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8021.bugfix diff --git a/CHANGES/8021.bugfix b/CHANGES/8021.bugfix new file mode 100644 index 00000000000..f43843a587f --- /dev/null +++ b/CHANGES/8021.bugfix @@ -0,0 +1 @@ +Add runtime type check for ``ClientSession`` ``timeout`` parameter. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 931d5c5b9aa..e94381dcf28 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -143,6 +143,7 @@ Hugo Hromic Hugo van Kemenade Hynek Schlawack Igor Alexandrov +Igor Bolshakov Igor Davydenko Igor Mozharovsky Igor Pavlov diff --git a/aiohttp/client.py b/aiohttp/client.py index 83ef1ba586a..bcf32fdab9d 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -272,7 +272,7 @@ def __init__( self._default_auth = auth self._version = version self._json_serialize = json_serialize - if timeout is sentinel: + if timeout is sentinel or timeout is None: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: warnings.warn( @@ -289,7 +289,12 @@ def __init__( stacklevel=2, ) else: - self._timeout = timeout # type: ignore[assignment] + if not isinstance(timeout, ClientTimeout): + raise ValueError( + f"timeout parameter cannot be of {type(timeout)} type, " + "please use 'timeout=ClientTimeout(...)'", + ) + self._timeout = timeout if read_timeout is not sentinel: raise ValueError( "read_timeout and timeout parameters " diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 2823fc46244..a0654ed8ccd 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -814,12 +814,6 @@ async def test_client_session_timeout_default_args(loop) -> None: await session1.close() -async def test_client_session_timeout_argument() -> None: - session = ClientSession(timeout=500) - assert session.timeout == 500 - await session.close() - - async def test_client_session_timeout_zero() -> None: timeout = client.ClientTimeout(total=10, connect=0, sock_connect=0, sock_read=0) try: @@ -829,6 +823,13 @@ async def test_client_session_timeout_zero() -> None: pytest.fail("0 should disable timeout.") +async def test_client_session_timeout_bad_argument() -> None: + with pytest.raises(ValueError): + ClientSession(timeout="test_bad_argumnet") + with pytest.raises(ValueError): + ClientSession(timeout=100) + + 
async def test_requote_redirect_url_default() -> None: session = ClientSession() assert session.requote_redirect_url From ca9b3ecb021c3dd61207dec8edb915b6a567ada4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Jan 2024 11:11:50 +0000 Subject: [PATCH 035/144] Bump actions/cache from 3.3.3 to 4.0.0 (#8031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 3.3.3 to 4.0.0.
Release notes

Sourced from actions/cache's releases.

v4.0.0

What's Changed

New Contributors

Full Changelog: https://github.com/actions/cache/compare/v3...v4.0.0

Changelog

Sourced from actions/cache's changelog.

3.3.3

  • Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to getCacheVersion actions/toolkit#1378
  • Additional audit fixes of npm package(s)

4.0.0

  • Updated minimum runner version support from node 12 -> node 20
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=3.3.3&new-version=4.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 3ffc4da8d31..6f30481641c 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v3.3.3 + uses: actions/cache@v4.0.0 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v3.3.3 + uses: actions/cache@v4.0.0 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} @@ -184,7 +184,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v3.3.3 + uses: actions/cache@v4.0.0 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 5cdc0fc9d1bd4c9601e59c3d9744bd1296998cfe Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 19 Jan 2024 14:19:10 -1000 Subject: [PATCH 036/144] [PR #8033/5424c534 backport][3.9] Small cleanups to WebSocketWriter (#8036) --- aiohttp/http_websocket.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index f395a27614a..b63453f99e5 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -605,7 +605,7 @@ def __init__( *, use_mask: bool = False, limit: int = DEFAULT_LIMIT, - random: Any = random.Random(), + random: random.Random = random.Random(), compress: int = 0, notakeover: bool = False, ) -> None: @@ -668,20 +668,20 @@ async def _send_frame( else: header = 
PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: - mask = self.randrange(0, 0xFFFFFFFF) - mask = mask.to_bytes(4, "big") + mask_int = self.randrange(0, 0xFFFFFFFF) + mask = mask_int.to_bytes(4, "big") message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + len(message) + self._output_size += len(header) + len(mask) + msg_length else: - if len(message) > MSG_SIZE: + if msg_length > MSG_SIZE: self._write(header) self._write(message) else: self._write(header + message) - self._output_size += len(header) + len(message) + self._output_size += len(header) + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. From f00017466a1c6639a363dc9d5ef747147cb8cbfa Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 20 Jan 2024 15:16:54 +0000 Subject: [PATCH 037/144] [PR #8010/2670e7b0 backport][3.9] Fix the Towncrier philosophy link (#8039) **This is a backport of PR #8010 as merged into master (2670e7b08da179e74a643dca8d795fd23fcd282e).** --- CHANGES/8010.doc | 2 ++ CHANGES/README.rst | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 CHANGES/8010.doc diff --git a/CHANGES/8010.doc b/CHANGES/8010.doc new file mode 100644 index 00000000000..db1b0aa3225 --- /dev/null +++ b/CHANGES/8010.doc @@ -0,0 +1,2 @@ +On the `CHANGES/README.rst `_ page, +a link to the ``Towncrier philosophy`` has been fixed. diff --git a/CHANGES/README.rst b/CHANGES/README.rst index c6b5153913a..9f619296351 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -92,4 +92,4 @@ File :file:`CHANGES/4594.feature.rst`: (``tool.towncrier.type``). .. 
_Towncrier philosophy: - https://towncrier.readthedocs.io/en/actual-freaking-docs/#philosophy + https://towncrier.readthedocs.io/en/stable/#philosophy From 034e5e34ee11c6138c773d85123490e691e1b708 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 00:03:56 +0000 Subject: [PATCH 038/144] [PR #8042/4b91b530 backport][3.9] Tightening the runtime type check for ssl (#7698) (#8043) **This is a backport of PR #8042 as merged into 3.10 (4b91b530e851acec62c7e9db4cf5c086bf153340).** Currently, the valid types of ssl parameter are SSLContext, Literal[False], Fingerprint or None. If user sets ssl = False, we disable ssl certificate validation which makes total sense. But if user set ssl = True by mistake, instead of enabling ssl certificate validation or raising errors, we silently disable the validation too which is a little subtle but weird. In this PR, we added a check that if user sets ssl=True, we enable certificate validation by treating it as using Default SSL Context. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Sviatoslav Sydorenko Co-authored-by: Sam Bull Co-authored-by: J. 
Nick Koston Co-authored-by: Sam Bull (cherry picked from commit 9e14ea19b5a48bb26797babc32202605066cb5f5) --- CHANGES/7698.feature | 1 + aiohttp/client.py | 12 +++++++----- aiohttp/client_exceptions.py | 6 +++--- aiohttp/client_reqrep.py | 21 ++++++++++----------- aiohttp/connector.py | 6 +++--- tests/test_client_exceptions.py | 10 +++++----- tests/test_client_fingerprint.py | 6 +++--- tests/test_client_request.py | 4 ++-- tests/test_connector.py | 16 ++++++++-------- tests/test_proxy.py | 4 ++-- 10 files changed, 44 insertions(+), 42 deletions(-) create mode 100644 CHANGES/7698.feature diff --git a/CHANGES/7698.feature b/CHANGES/7698.feature new file mode 100644 index 00000000000..e8c4b3fb452 --- /dev/null +++ b/CHANGES/7698.feature @@ -0,0 +1 @@ +Added support for passing `True` to `ssl` while deprecating `None`. -- by :user:`xiangyan99` diff --git a/aiohttp/client.py b/aiohttp/client.py index bcf32fdab9d..7e9b32fad6f 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -22,7 +22,6 @@ Generic, Iterable, List, - Literal, Mapping, Optional, Set, @@ -408,7 +407,7 @@ async def _request( verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, - ssl: Optional[Union[SSLContext, Literal[False], Fingerprint]] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, server_hostname: Optional[str] = None, proxy_headers: Optional[LooseHeaders] = None, trace_request_ctx: Optional[SimpleNamespace] = None, @@ -562,7 +561,7 @@ async def _request( proxy_auth=proxy_auth, timer=timer, session=self, - ssl=ssl, + ssl=ssl if ssl is not None else True, server_hostname=server_hostname, proxy_headers=proxy_headers, traces=traces, @@ -738,7 +737,7 @@ def ws_connect( headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + ssl: Union[SSLContext, bool, None, Fingerprint] = True, verify_ssl: 
Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -790,7 +789,7 @@ async def _ws_connect( headers: Optional[LooseHeaders] = None, proxy: Optional[StrOrURL] = None, proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, Literal[False], None, Fingerprint] = None, + ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: Optional[SSLContext] = None, @@ -824,6 +823,9 @@ async def _ws_connect( extstr = ws_ext_gen(compress=compress) real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + # For the sake of backward compatibility, if user passes in None, convert it to True + if ssl is None: + ssl = True ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) # send request diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 9aae12a84e8..9b6e44203c8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -180,12 +180,12 @@ def port(self) -> Optional[int]: return self._conn_key.port @property - def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]: + def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: return self._conn_key.ssl def __str__(self) -> str: return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( - self, self.ssl if self.ssl is not None else "default", self.strerror + self, "default" if self.ssl is True else self.ssl, self.strerror ) # OSError.__reduce__ does too much black magick @@ -219,7 +219,7 @@ def path(self) -> str: def __str__(self) -> str: return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( - self, self.ssl if self.ssl is not None else "default", self.strerror + self, "default" if self.ssl is True else self.ssl, self.strerror ) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4ae0ecbcdfb..bb43ae9318d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -17,7 
+17,6 @@ Dict, Iterable, List, - Literal, Mapping, Optional, Tuple, @@ -151,22 +150,22 @@ def check(self, transport: asyncio.Transport) -> None: if ssl is not None: SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None)) else: # pragma: no cover - SSL_ALLOWED_TYPES = type(None) + SSL_ALLOWED_TYPES = (bool, type(None)) def _merge_ssl_params( - ssl: Union["SSLContext", Literal[False], Fingerprint, None], + ssl: Union["SSLContext", bool, Fingerprint], verify_ssl: Optional[bool], ssl_context: Optional["SSLContext"], fingerprint: Optional[bytes], -) -> Union["SSLContext", Literal[False], Fingerprint, None]: +) -> Union["SSLContext", bool, Fingerprint]: if verify_ssl is not None and not verify_ssl: warnings.warn( "verify_ssl is deprecated, use ssl=False instead", DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -179,7 +178,7 @@ def _merge_ssl_params( DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -192,7 +191,7 @@ def _merge_ssl_params( DeprecationWarning, stacklevel=3, ) - if ssl is not None: + if ssl is not True: raise ValueError( "verify_ssl, ssl_context, fingerprint and ssl " "parameters are mutually exclusive" @@ -214,7 +213,7 @@ class ConnectionKey: host: str port: Optional[int] is_ssl: bool - ssl: Union[SSLContext, None, Literal[False], Fingerprint] + ssl: Union[SSLContext, bool, Fingerprint] proxy: Optional[URL] proxy_auth: Optional[BasicAuth] proxy_headers_hash: Optional[int] # hash(CIMultiDict) @@ -276,7 +275,7 @@ def __init__( proxy_auth: Optional[BasicAuth] = None, timer: Optional[BaseTimerContext] = None, session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, Literal[False], Fingerprint, None] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, 
proxy_headers: Optional[LooseHeaders] = None, traces: Optional[List["Trace"]] = None, trust_env: bool = False, @@ -315,7 +314,7 @@ def __init__( real_response_class = response_class self.response_class: Type[ClientResponse] = real_response_class self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl + self._ssl = ssl if ssl is not None else True self.server_hostname = server_hostname if loop.get_debug(): @@ -357,7 +356,7 @@ def is_ssl(self) -> bool: return self.url.scheme in ("https", "wss") @property - def ssl(self) -> Union["SSLContext", None, Literal[False], Fingerprint]: + def ssl(self) -> Union["SSLContext", bool, Fingerprint]: return self._ssl @property diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 73f58b1a451..3b9841dd094 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -762,7 +762,7 @@ def __init__( ttl_dns_cache: Optional[int] = 10, family: int = 0, ssl_context: Optional[SSLContext] = None, - ssl: Union[None, Literal[False], Fingerprint, SSLContext] = None, + ssl: Union[bool, Fingerprint, SSLContext] = True, local_addr: Optional[Tuple[str, int]] = None, resolver: Optional[AbstractResolver] = None, keepalive_timeout: Union[None, float, object] = sentinel, @@ -955,13 +955,13 @@ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: sslcontext = req.ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext - if sslcontext is not None: + if sslcontext is not True: # not verified or fingerprinted return self._make_ssl_context(False) sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext - if sslcontext is not None: + if sslcontext is not True: # not verified or fingerprinted return self._make_ssl_context(False) return self._make_ssl_context(True) diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index 8f34e4cc73c..f70ba5d09a6 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -119,7 +119,7 @@ 
class TestClientConnectorError: host="example.com", port=8080, is_ssl=False, - ssl=None, + ssl=True, proxy=None, proxy_auth=None, proxy_headers_hash=None, @@ -136,7 +136,7 @@ def test_ctor(self) -> None: assert err.os_error.strerror == "No such file" assert err.host == "example.com" assert err.port == 8080 - assert err.ssl is None + assert err.ssl is True def test_pickle(self) -> None: err = client.ClientConnectorError( @@ -153,7 +153,7 @@ def test_pickle(self) -> None: assert err2.os_error.strerror == "No such file" assert err2.host == "example.com" assert err2.port == 8080 - assert err2.ssl is None + assert err2.ssl is True assert err2.foo == "bar" def test_repr(self) -> None: @@ -171,7 +171,7 @@ def test_str(self) -> None: os_error=OSError(errno.ENOENT, "No such file"), ) assert str(err) == ( - "Cannot connect to host example.com:8080 ssl:" "default [No such file]" + "Cannot connect to host example.com:8080 ssl:default [No such file]" ) @@ -180,7 +180,7 @@ class TestClientConnectorCertificateError: host="example.com", port=8080, is_ssl=False, - ssl=None, + ssl=True, proxy=None, proxy_auth=None, proxy_headers_hash=None, diff --git a/tests/test_client_fingerprint.py b/tests/test_client_fingerprint.py index b1ae3cae36e..68dd528e0a2 100644 --- a/tests/test_client_fingerprint.py +++ b/tests/test_client_fingerprint.py @@ -37,7 +37,7 @@ def test_fingerprint_check_no_ssl() -> None: def test__merge_ssl_params_verify_ssl() -> None: with pytest.warns(DeprecationWarning): - assert _merge_ssl_params(None, False, None, None) is False + assert _merge_ssl_params(True, False, None, None) is False def test__merge_ssl_params_verify_ssl_conflict() -> None: @@ -50,7 +50,7 @@ def test__merge_ssl_params_verify_ssl_conflict() -> None: def test__merge_ssl_params_ssl_context() -> None: ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) with pytest.warns(DeprecationWarning): - assert _merge_ssl_params(None, None, ctx, None) is ctx + assert _merge_ssl_params(True, None, ctx, None) is ctx def 
test__merge_ssl_params_ssl_context_conflict() -> None: @@ -64,7 +64,7 @@ def test__merge_ssl_params_ssl_context_conflict() -> None: def test__merge_ssl_params_fingerprint() -> None: digest = hashlib.sha256(b"123").digest() with pytest.warns(DeprecationWarning): - ret = _merge_ssl_params(None, None, None, digest) + ret = _merge_ssl_params(True, None, None, digest) assert ret.fingerprint == digest diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c8ce98d4034..6521b70ad55 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -166,7 +166,7 @@ def test_host_port_default_http(make_request) -> None: req = make_request("get", "http://python.org/") assert req.host == "python.org" assert req.port == 80 - assert not req.ssl + assert not req.is_ssl() def test_host_port_default_https(make_request) -> None: @@ -400,7 +400,7 @@ def test_ipv6_default_http_port(make_request) -> None: req = make_request("get", "http://[2001:db8::1]/") assert req.host == "2001:db8::1" assert req.port == 80 - assert not req.ssl + assert not req.is_ssl() def test_ipv6_default_https_port(make_request) -> None: diff --git a/tests/test_connector.py b/tests/test_connector.py index f27d4131049..dc8aa3c2605 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -28,19 +28,19 @@ @pytest.fixture() def key(): # Connection key - return ConnectionKey("localhost", 80, False, None, None, None, None) + return ConnectionKey("localhost", 80, False, True, None, None, None) @pytest.fixture def key2(): # Connection key - return ConnectionKey("localhost", 80, False, None, None, None, None) + return ConnectionKey("localhost", 80, False, True, None, None, None) @pytest.fixture def ssl_key(): # Connection key - return ConnectionKey("localhost", 80, True, None, None, None, None) + return ConnectionKey("localhost", 80, True, True, None, None, None) @pytest.fixture @@ -1219,9 +1219,9 @@ async def test_cleanup_closed_disabled(loop, mocker) -> None: assert not 
conn._cleanup_closed_transports -async def test_tcp_connector_ctor(loop) -> None: - conn = aiohttp.TCPConnector(loop=loop) - assert conn._ssl is None +async def test_tcp_connector_ctor() -> None: + conn = aiohttp.TCPConnector() + assert conn._ssl is True assert conn.use_dns_cache assert conn.family == 0 @@ -1307,7 +1307,7 @@ async def test___get_ssl_context3(loop) -> None: conn = aiohttp.TCPConnector(loop=loop, ssl=ctx) req = mock.Mock() req.is_ssl.return_value = True - req.ssl = None + req.ssl = True assert conn._get_ssl_context(req) is ctx @@ -1333,7 +1333,7 @@ async def test___get_ssl_context6(loop) -> None: conn = aiohttp.TCPConnector(loop=loop) req = mock.Mock() req.is_ssl.return_value = True - req.ssl = None + req.ssl = True assert conn._get_ssl_context(req) is conn._make_ssl_context(True) diff --git a/tests/test_proxy.py b/tests/test_proxy.py index 1ff53e3f899..6366a13d573 100644 --- a/tests/test_proxy.py +++ b/tests/test_proxy.py @@ -75,7 +75,7 @@ async def make_conn(): auth=None, headers={"Host": "www.python.org"}, loop=self.loop, - ssl=None, + ssl=True, ) conn.close() @@ -117,7 +117,7 @@ async def make_conn(): auth=None, headers={"Host": "www.python.org", "Foo": "Bar"}, loop=self.loop, - ssl=None, + ssl=True, ) conn.close() From 437ac47fe332106a07a2d5335bb89619f1bc23f7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 01:51:59 +0000 Subject: [PATCH 039/144] [PR #7995/43a5bc50 backport][3.9] Fix examples of `fallback_charset_resolver` function in client_advanced documentation (#8044) **This is a backport of PR #7995 as merged into master (43a5bc5097be31a25037fbfdbe39e86138a29cbd).** Co-authored-by: OMOTO Tsukasa --- CHANGES/7995.doc | 1 + docs/client_advanced.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/7995.doc diff --git a/CHANGES/7995.doc b/CHANGES/7995.doc new file mode 100644 index 00000000000..70e3dfa5469 --- /dev/null +++ 
b/CHANGES/7995.doc @@ -0,0 +1 @@ +Fix examples of `fallback_charset_resolver` function in client_advanced documentation. -- by :user:`henry0312` diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst index 037e50a9363..958e31dcc7c 100644 --- a/docs/client_advanced.rst +++ b/docs/client_advanced.rst @@ -674,7 +674,7 @@ example, this can be used with the ``chardetng_py`` library.:: def charset_resolver(resp: ClientResponse, body: bytes) -> str: tld = resp.url.host.rsplit(".", maxsplit=1)[-1] - return detect(body, allow_utf8=True, tld=tld) + return detect(body, allow_utf8=True, tld=tld.encode()) ClientSession(fallback_charset_resolver=charset_resolver) @@ -682,4 +682,4 @@ Or, if ``chardetng_py`` doesn't work for you, then ``charset-normalizer`` is ano from charset_normalizer import detect - ClientSession(fallback_charset_resolver=lamba r, b: detect(b)["encoding"] or "utf-8") + ClientSession(fallback_charset_resolver=lambda r, b: detect(b)["encoding"] or "utf-8") From a54dab3b36bcf0d815b9244f52ae7bc5da08f387 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 21 Jan 2024 21:39:11 +0000 Subject: [PATCH 040/144] [PR #8049/a379e634 backport][3.9] Set cause for ClientPayloadError (#8050) **This is a backport of PR #8049 as merged into master (a379e6344432d5c033f78c2733fe69659e3cff50).** --- aiohttp/client_proto.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 6225b33667c..ca99808080d 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -81,11 +81,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception: + except Exception as e: if self._payload is not None: - self._payload.set_exception( - ClientPayloadError("Response payload is not completed") - ) + exc = ClientPayloadError("Response payload is not 
completed") + exc.__cause__ = e + self._payload.set_exception(exc) if not self.is_eof(): if isinstance(exc, OSError): From 419d715c42c46daf1a59e0aff61c1f6d10236982 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 03:34:42 +0100 Subject: [PATCH 041/144] =?UTF-8?q?[PR=20#8066/cba34699=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=92=85=F0=9F=93=9D=20Restructure=20the=20changelog=20f?= =?UTF-8?q?or=20clarity=20(#8068)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8066 as merged into master (cba346995b953b23421079ee0bccdfe85d736e7a).** PR #8066 (cherry picked from commit cba346995b953b23421079ee0bccdfe85d736e7a) ## What do these changes do? ## Are there changes in behavior for the user? ## Related issue number ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. 
* `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. --- .github/PULL_REQUEST_TEMPLATE.md | 42 +++++++++++++----- .github/workflows/ci-cd.yml | 6 ++- .pre-commit-config.yaml | 30 ++++++++++++- CHANGES/.TEMPLATE.rst | 52 +++++++++++++++++++++-- CHANGES/.gitignore | 27 ++++++++++++ CHANGES/8066.contrib.rst | 21 +++++++++ CHANGES/8066.packaging.rst | 1 + CHANGES/README.rst | 25 ++++++++--- docs/spelling_wordlist.txt | 7 +++ pyproject.toml | 73 +++++++++++++++++++++++++++++--- tools/check_changes.py | 17 +++++++- tools/cleanup_changes.py | 24 ++++++++--- 12 files changed, 289 insertions(+), 36 deletions(-) create mode 100644 CHANGES/8066.contrib.rst create mode 120000 CHANGES/8066.packaging.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 3ac54a518b5..686f70cd975 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -21,13 +21,35 @@ - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
-- [ ] Add a new news fragment into the `CHANGES` folder - * name it `.` for example (588.bugfix) - * if you don't have an `issue_id` change it to the pr id after creating the pr - * ensure type is one of the following: - * `.feature`: Signifying a new feature. - * `.bugfix`: Signifying a bug fix. - * `.doc`: Signifying a documentation improvement. - * `.removal`: Signifying a deprecation or removal of public API. - * `.misc`: A ticket has been closed, but it is not of interest to users. - * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." +- [ ] Add a new news fragment into the `CHANGES/` folder + * name it `..rst` (e.g. `588.bugfix.rst`) + * if you don't have an issue number, change it to the pull request + number after creating the PR + * `.bugfix`: A bug fix for something the maintainers deemed an + improper undesired behavior that got corrected to match + pre-agreed expectations. + * `.feature`: A new behavior, public APIs. That sort of stuff. + * `.deprecation`: A declaration of future API removals and breaking + changes in behavior. + * `.breaking`: When something public is removed in a breaking way. + Could be deprecated in an earlier release. + * `.doc`: Notable updates to the documentation structure or build + process. + * `.packaging`: Notes for downstreams about unobvious side effects + and tooling. Changes in the test invocation considerations and + runtime assumptions. + * `.contrib`: Stuff that affects the contributor experience. e.g. + Running tests, building the docs, setting up the development + environment. + * `.misc`: Changes that are hard to assign to any of the above + categories. + * Make sure to use full sentences with correct case and punctuation, + for example: + ```rst + Fixed issue with non-ascii contents in doctest text files + -- by :user:`contributor-gh-handle`. 
+ ``` + + Use the past tense or the present tense a non-imperative mood, + referring to what's changed compared to the last released version + of this project. diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 6f30481641c..af3c0bf6f23 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -414,8 +414,10 @@ jobs: version_file: aiohttp/__init__.py github_token: ${{ secrets.GITHUB_TOKEN }} dist_dir: dist - fix_issue_regex: "`#(\\d+) `_" - fix_issue_repl: "(#\\1)" + fix_issue_regex: >- + :issue:`(\d+)` + fix_issue_repl: >- + #\1 - name: >- Publish 🐍📦 to PyPI diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 587c46e991d..d11ab1bfa32 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,9 +6,35 @@ repos: language: fail entry: >- Changelog files must be named - ####.(bugfix|feature|removal|doc|misc)(.#)?(.rst)? + ####.( + bugfix + | feature + | deprecation + | breaking + | doc + | packaging + | contrib + | misc + )(.#)?(.rst)? exclude: >- - ^CHANGES/(\.TEMPLATE\.rst|\.gitignore|\d+\.(bugfix|feature|removal|doc|misc)(\.\d+)?(\.rst)?|README\.rst)$ + (?x) + ^ + CHANGES/( + \.gitignore + |(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.( + bugfix + |feature + |deprecation + |breaking + |doc + |packaging + |contrib + |misc + )(\.\d+)?(\.rst)? 
+ |README\.rst + |\.TEMPLATE\.rst + ) + $ files: ^CHANGES/ - id: changelogs-user-role name: Changelog files should use a non-broken :user:`name` role diff --git a/CHANGES/.TEMPLATE.rst b/CHANGES/.TEMPLATE.rst index a27a1994b53..9334cefd84f 100644 --- a/CHANGES/.TEMPLATE.rst +++ b/CHANGES/.TEMPLATE.rst @@ -11,11 +11,56 @@ {{ underline * definitions[category]['name']|length }} {% if definitions[category]['showcontent'] %} -{% for text, values in sections[section][category].items() %} +{% for text, change_note_refs in sections[section][category].items() %} - {{ text + '\n' }} - {{ values|join(',\n ') + '\n' }} -{% endfor %} + {# + NOTE: Replacing 'e' with 'f' is a hack that prevents Jinja's `int` + NOTE: filter internal implementation from treating the input as an + NOTE: infinite float when it looks like a scientific notation (with a + NOTE: single 'e' char in between digits), raising an `OverflowError`, + NOTE: subsequently. 'f' is still a hex letter so it won't affect the + NOTE: check for whether it's a (short or long) commit hash or not. + Ref: https://github.com/pallets/jinja/issues/1921 + -#} + {%- + set pr_issue_numbers = change_note_refs + | map('lower') + | map('replace', 'e', 'f') + | map('int', default=None) + | select('integer') + | map('string') + | list + -%} + {%- set arbitrary_refs = [] -%} + {%- set commit_refs = [] -%} + {%- with -%} + {%- set commit_ref_candidates = change_note_refs | reject('in', pr_issue_numbers) -%} + {%- for cf in commit_ref_candidates -%} + {%- if cf | length in (7, 8, 40) and cf | int(default=None, base=16) is not none -%} + {%- set _ = commit_refs.append(cf) -%} + {%- else -%} + {%- set _ = arbitrary_refs.append(cf) -%} + {%- endif -%} + {%- endfor -%} + {%- endwith -%} + + {% if pr_issue_numbers -%} + *Related issues and pull requests on GitHub:* + :issue:`{{ pr_issue_numbers | join('`, :issue:`') }}`. 
+ {% endif %} + + {% if commit_refs -%} + *Related commits on GitHub:* + :commit:`{{ commit_refs | join('`, :commit:`') }}`. + {% endif %} + + {% if arbitrary_refs -%} + *Unlinked references:* + {{ arbitrary_refs | join(', ') }}`. + {% endif %} + +{% endfor %} {% else %} - {{ sections[section][category]['']|join(', ') }} @@ -34,3 +79,4 @@ No significant changes. {% endif %} {% endfor %} ---- +{{ '\n' * 2 }} diff --git a/CHANGES/.gitignore b/CHANGES/.gitignore index f935021a8f8..d6409a0dd82 100644 --- a/CHANGES/.gitignore +++ b/CHANGES/.gitignore @@ -1 +1,28 @@ +* +!.TEMPLATE.rst !.gitignore +!README.rst +!*.bugfix +!*.bugfix.rst +!*.bugfix.*.rst +!*.breaking +!*.breaking.rst +!*.breaking.*.rst +!*.contrib +!*.contrib.rst +!*.contrib.*.rst +!*.deprecation +!*.deprecation.rst +!*.deprecation.*.rst +!*.doc +!*.doc.rst +!*.doc.*.rst +!*.feature +!*.feature.rst +!*.feature.*.rst +!*.misc +!*.misc.rst +!*.misc.*.rst +!*.packaging +!*.packaging.rst +!*.packaging.*.rst diff --git a/CHANGES/8066.contrib.rst b/CHANGES/8066.contrib.rst new file mode 100644 index 00000000000..2468018e99b --- /dev/null +++ b/CHANGES/8066.contrib.rst @@ -0,0 +1,21 @@ +The changelog categorization was made clearer. The +contributors can now mark their fragment files more +accurately -- by :user:`webknjaz`. + +The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` diff --git a/CHANGES/8066.packaging.rst b/CHANGES/8066.packaging.rst new file mode 120000 index 00000000000..57cdff225f5 --- /dev/null +++ b/CHANGES/8066.packaging.rst @@ -0,0 +1 @@ +8066.contrib.rst \ No newline at end of file diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 9f619296351..bf467d2bc07 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -43,7 +43,7 @@ with your own!). 
Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a PR followed by a dot, then add a patch type, like ``feature``, -``doc``, ``misc`` etc., and add ``.rst`` as a suffix. If you +``doc``, ``contrib`` etc., and add ``.rst`` as a suffix. If you need to add more than one fragment, you may add an optional sequence number (delimited with another period) between the type and the suffix. @@ -51,11 +51,24 @@ and the suffix. In general the name will follow ``..rst`` pattern, where the categories are: -- ``feature``: Any new feature -- ``bugfix``: A bug fix -- ``doc``: A change to the documentation -- ``misc``: Changes internal to the repo like CI, test and build changes -- ``removal``: For deprecations and removals of an existing feature or behavior +- ``bugfix``: A bug fix for something we deemed an improper undesired + behavior that got corrected in the release to match pre-agreed + expectations. +- ``feature``: A new behavior, public APIs. That sort of stuff. +- ``deprecation``: A declaration of future API removals and breaking + changes in behavior. +- ``breaking``: When something public gets removed in a breaking way. + Could be deprecated in an earlier release. +- ``doc``: Notable updates to the documentation structure or build + process. +- ``packaging``: Notes for downstreams about unobvious side effects + and tooling. Changes in the test invocation considerations and + runtime assumptions. +- ``contrib``: Stuff that affects the contributor experience. e.g. + Running tests, building the docs, setting up the development + environment. +- ``misc``: Changes that are hard to assign to any of the above + categories. 
A pull request may have more than one of these components, for example a code change may introduce a new feature that deprecates an old diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 1523ccd2a65..768d52cfd05 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -17,6 +17,7 @@ Arsenic async asyncio asyncpg +attrs auth autocalculated autodetection @@ -34,6 +35,7 @@ backports BaseEventLoop basename BasicAuth +behaviour BodyPartReader boolean botocore @@ -89,6 +91,7 @@ Cythonize cythonized de deduplicate +defs Dependabot deprecations DER @@ -104,6 +107,7 @@ DNSResolver docstring docstrings DoS +downstreams Dup elasticsearch encodings @@ -313,6 +317,8 @@ Testsuite Tf timestamps TLS +tmp +tmpdir toolbar toplevel towncrier @@ -329,6 +335,7 @@ Unittest unix unsets unstripped +untyped uppercased upstr url diff --git a/pyproject.toml b/pyproject.toml index 1f590d002ef..85d7c87eb34 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,12 +5,73 @@ requires = [ build-backend = "setuptools.build_meta" [tool.towncrier] -package = "aiohttp" -filename = "CHANGES.rst" -directory = "CHANGES/" -title_format = "{version} ({project_date})" -template = "CHANGES/.TEMPLATE.rst" -issue_format = "`#{issue} `_" + package = "aiohttp" + filename = "CHANGES.rst" + directory = "CHANGES/" + title_format = "{version} ({project_date})" + template = "CHANGES/.TEMPLATE.rst" + issue_format = "{issue}" + + # NOTE: The types are declared because: + # NOTE: - there is no mechanism to override just the value of + # NOTE: `tool.towncrier.type.misc.showcontent`; + # NOTE: - and, we want to declare extra non-default types for + # NOTE: clarity and flexibility. + + [[tool.towncrier.section]] + path = "" + + [[tool.towncrier.type]] + # Something we deemed an improper undesired behavior that got corrected + # in the release to match pre-agreed expectations. 
+ directory = "bugfix" + name = "Bug fixes" + showcontent = true + + [[tool.towncrier.type]] + # New behaviors, public APIs. That sort of stuff. + directory = "feature" + name = "Features" + showcontent = true + + [[tool.towncrier.type]] + # Declarations of future API removals and breaking changes in behavior. + directory = "deprecation" + name = "Deprecations (removal in next major release)" + showcontent = true + + [[tool.towncrier.type]] + # When something public gets removed in a breaking way. Could be + # deprecated in an earlier release. + directory = "breaking" + name = "Removals and backward incompatible breaking changes" + showcontent = true + + [[tool.towncrier.type]] + # Notable updates to the documentation structure or build process. + directory = "doc" + name = "Improved documentation" + showcontent = true + + [[tool.towncrier.type]] + # Notes for downstreams about unobvious side effects and tooling. Changes + # in the test invocation considerations and runtime assumptions. + directory = "packaging" + name = "Packaging updates and notes for downstreams" + showcontent = true + + [[tool.towncrier.type]] + # Stuff that affects the contributor experience. e.g. Running tests, + # building the docs, setting up the development environment. + directory = "contrib" + name = "Contributor-facing changes" + showcontent = true + + [[tool.towncrier.type]] + # Changes that are hard to assign to any of the above categories. 
+ directory = "misc" + name = "Miscellaneous internal changes" + showcontent = true [tool.cibuildwheel] diff --git a/tools/check_changes.py b/tools/check_changes.py index da806e014f3..118d1182b9a 100755 --- a/tools/check_changes.py +++ b/tools/check_changes.py @@ -4,8 +4,21 @@ import sys from pathlib import Path -ALLOWED_SUFFIXES = ["feature", "bugfix", "doc", "removal", "misc"] -PATTERN = re.compile(r"\d+\.(" + "|".join(ALLOWED_SUFFIXES) + r")(\.\d+)?(\.rst)?") +ALLOWED_SUFFIXES = ( + "bugfix", + "feature", + "deprecation", + "breaking", + "doc", + "packaging", + "contrib", + "misc", +) +PATTERN = re.compile( + r"(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.(" + + "|".join(ALLOWED_SUFFIXES) + + r")(\.\d+)?(\.rst)?", +) def get_root(script_path): diff --git a/tools/cleanup_changes.py b/tools/cleanup_changes.py index 673866b8d67..5b931138056 100755 --- a/tools/cleanup_changes.py +++ b/tools/cleanup_changes.py @@ -7,8 +7,21 @@ import subprocess from pathlib import Path -ALLOWED_SUFFIXES = ["feature", "bugfix", "doc", "removal", "misc"] -PATTERN = re.compile(r"(\d+)\.(" + "|".join(ALLOWED_SUFFIXES) + r")(\.\d+)?(\.rst)?") +ALLOWED_SUFFIXES = ( + "bugfix", + "feature", + "deprecation", + "breaking", + "doc", + "packaging", + "contrib", + "misc", +) +PATTERN = re.compile( + r"(\d+|[0-9a-f]{8}|[0-9a-f]{7}|[0-9a-f]{40})\.(" + + "|".join(ALLOWED_SUFFIXES) + + r")(\.\d+)?(\.rst)?", +) def main(): @@ -18,9 +31,10 @@ def main(): for fname in (root / "CHANGES").iterdir(): match = PATTERN.match(fname.name) if match is not None: - num = match.group(1) - tst = f"`#{num} `_" - if tst in changes: + commit_issue_or_pr = match.group(1) + tst_issue_or_pr = f":issue:`{commit_issue_or_pr}`" + tst_commit = f":commit:`{commit_issue_or_pr}`" + if tst_issue_or_pr in changes or tst_commit in changes: subprocess.run(["git", "rm", fname]) delete.append(fname.name) print("Deleted CHANGES records:", " ".join(delete)) From 3ec4fa1f0e0a0dad218c75dbe5ed09e22d5cc284 Mon Sep 17 00:00:00 2001 From: 
"patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 04:10:16 +0100 Subject: [PATCH 042/144] =?UTF-8?q?[PR=20#8069/69bbe874=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=93=9D=20Only=20show=20changelog=20draft=20for=20non-r?= =?UTF-8?q?eleases=20(#8070)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8069 as merged into master (69bbe87400f826595d0f87bb2233cb1dae24ac84).** ## What do these changes do? $sbj. ## Are there changes in behavior for the user? RTD for tagged docs builds won't show the changelog draft (which should be empty in this context). ## Related issue number N/A ## Checklist - [x] I think the code is well written - [x] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [x] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. 
* `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/8067.doc.rst | 3 +++ docs/changes.rst | 13 ++++++++----- docs/conf.py | 6 ++++++ 3 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 CHANGES/8067.doc.rst diff --git a/CHANGES/8067.doc.rst b/CHANGES/8067.doc.rst new file mode 100644 index 00000000000..3206db9ae87 --- /dev/null +++ b/CHANGES/8067.doc.rst @@ -0,0 +1,3 @@ +The Sphinx setup was updated to avoid showing the empty +changelog draft section in the tagged release documentation +builds on Read The Docs -- by :user:`webknjaz`. diff --git a/docs/changes.rst b/docs/changes.rst index 6a61dfbcc1e..089f67235a1 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -4,14 +4,17 @@ Changelog ========= -To be included in v\ |release| (if present) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. only:: not is_release -.. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] + To be included in v\ |release| (if present) + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Released versions -^^^^^^^^^^^^^^^^^ + .. towncrier-draft-entries:: |release| [UNRELEASED DRAFT] + + Released versions + ^^^^^^^^^^^^^^^^^ .. include:: ../CHANGES.rst + :start-after: .. towncrier release notes start .. 
include:: ../HISTORY.rst diff --git a/docs/conf.py b/docs/conf.py index d3e04f3d48e..f21366fb488 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,6 +18,12 @@ from pathlib import Path PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve() +IS_RELEASE_ON_RTD = ( + os.getenv("READTHEDOCS", "False") == "True" + and os.environ["READTHEDOCS_VERSION_TYPE"] == "tag" +) +if IS_RELEASE_ON_RTD: + tags.add("is_release") _docs_path = os.path.dirname(__file__) _version_path = os.path.abspath( From 0d945d1be08f2ba8475513216a66411f053c3217 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 15:22:36 +0000 Subject: [PATCH 043/144] [PR #7916/822fbc74 backport][3.9] Add more information to contributing page (#8072) **This is a backport of PR #7916 as merged into master (822fbc7431f3c5522d3e587ad0b658bef8b6a0ab).** Co-authored-by: Sam Bull --- CHANGES/7916.doc | 1 + CHANGES/README.rst | 29 +- docs/_static/img/contributing-cov-comment.svg | 55 ++ docs/_static/img/contributing-cov-header.svg | 15 + docs/_static/img/contributing-cov-miss.svg | 709 ++++++++++++++++++ docs/_static/img/contributing-cov-partial.svg | 268 +++++++ docs/contributing.rst | 124 +-- docs/spelling_wordlist.txt | 1 + 8 files changed, 1137 insertions(+), 65 deletions(-) create mode 100644 CHANGES/7916.doc create mode 100644 docs/_static/img/contributing-cov-comment.svg create mode 100644 docs/_static/img/contributing-cov-header.svg create mode 100644 docs/_static/img/contributing-cov-miss.svg create mode 100644 docs/_static/img/contributing-cov-partial.svg diff --git a/CHANGES/7916.doc b/CHANGES/7916.doc new file mode 100644 index 00000000000..b616ae85bbe --- /dev/null +++ b/CHANGES/7916.doc @@ -0,0 +1 @@ +Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/README.rst b/CHANGES/README.rst index bf467d2bc07..5beb8999226 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,7 +1,15 @@ -.. _Adding change notes with your PRs: +.. _Making a pull request: + +Making a pull request +===================== + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. Adding change notes with your PRs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------------- It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -9,7 +17,7 @@ end-users. This is why we enforce collection of the change fragment files in pull requests as per `Towncrier philosophy`_. The idea is that when somebody makes a change, they must record -the bits that would affect end-users only including information +the bits that would affect end-users, only including information that would be useful to them. Then, when the maintainers publish a new release, they'll automatically use these records to compose a change log for the respective version. It is important to @@ -19,7 +27,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +--------------------------------------- ``aiohttp`` uses `towncrier `_ for changelog management. @@ -34,11 +42,14 @@ for the users to understand what it means. combined with others, it will be a part of the "news digest" telling the readers **what changed** in a specific version of the library *since the previous version*. 
You should also use -reStructuredText syntax for highlighting code (inline or block), +*reStructuredText* syntax for highlighting code (inline or block), linking parts of the docs or external sites. -If you wish to sign your change, feel free to add ``-- by -:user:`github-username``` at the end (replace ``github-username`` -with your own!). +However, you do not need to reference the issue or PR numbers here +as *towncrier* will automatically add a reference to all of the +affected issues when rendering the news file. +If you wish to sign your change, feel free to add +``-- by :user:`github-username``` at the end (replace +``github-username`` with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -77,7 +88,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +----------------------------------------------------------- File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/_static/img/contributing-cov-comment.svg b/docs/_static/img/contributing-cov-comment.svg new file mode 100644 index 00000000000..c5ba1005641 --- /dev/null +++ b/docs/_static/img/contributing-cov-comment.svg @@ -0,0 +1,55 @@ + + + + + + + + + + + + + + + + + + + + + Hits 31428 31504 +76 + + + + + + + + + + + + - + + + + Misses 632 633 +1 + + + + + + + + + + + + - + + + + Partials 203 205 +2 + + + diff --git a/docs/_static/img/contributing-cov-header.svg b/docs/_static/img/contributing-cov-header.svg new file mode 100644 index 00000000000..f51c8957cd1 --- /dev/null +++ b/docs/_static/img/contributing-cov-header.svg @@ -0,0 +1,15 @@ + + + + + + + + + Codecov + + + + Report + + diff --git a/docs/_static/img/contributing-cov-miss.svg b/docs/_static/img/contributing-cov-miss.svg new file mode 100644 index 
00000000000..d431cd0f1fc --- /dev/null +++ b/docs/_static/img/contributing-cov-miss.svg @@ -0,0 +1,709 @@ + + + + + + + + + + + + + + 733 + + + + + + + + + + + + + + 740 + + + + + + + + + + + + + + + + + + async + + + + + + + + + + def + + + + + + + + + + resolve + + + + + ( + + + + + self + + + + + , + + + + + request + + + + + : + + + + + Request + + + + + ) + + + + + + + + + + - + + + + + > + + + + + _Resolve + + + + + : + + + + + + + + + + + 15 + + + + + + + + + + + + + + + + + 734 + + + + + + + + + + + + + + 741 + + + + + + + + + + + + + + + + + + + + + + + if + + + + + + + + + + ( + + + + + + + + + ! + + + + + + + + + + + + + + + + + + + + + + 735 + + + + + + + + + + + + + 742 + + + + + + + + + + + + + + + not + + + + + request + + + + + . + + + + + url + + + + + . + + + + + raw_path + + + + + . + + + + + startswith + + + + + ( + + + + + self + + + + + . + + + + + _prefix2 + + + + + ) + + + + + + + + + + + + + + + + + + 736 + + + + + + + + + + + + + 743 + + + + + + + + + + + + + + + and + + + + + request + + + + + . + + + + + url + + + + + . + + + + + raw_path + + + + + != + + + + + self + + + + + . + + + + + _prefix + + + + + + + + + + + + + + + + + 737 + + + + + + + + + + + + + 744 + + + + + + + + + + + + + + + ) + + + + + : + + + + + + + + + + + + + + + + + + + 738 + + + + + + + + + + + + + + 745 + + + + + + + + + + + + + + + + + + + + + + + return + + + + + + + + + + None + + + + + , + + + + + + + + + + set + + + + + ( + + + + + ) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 739 + + + + + + + + + + + + + + 746 + + + + + + + + + + + + + + + + + + match_info + + + + + = + + + + + + + + + + await + + + + + self + + + + + . + + + + + _app + + + + + . + + + + + router + + + + + . 
+ + + + + resolve + + + + + ( + + + + + request + + + + + ) + + + + + + + + + + + 15 + + + + + + + diff --git a/docs/_static/img/contributing-cov-partial.svg b/docs/_static/img/contributing-cov-partial.svg new file mode 100644 index 00000000000..5eceb26b9eb --- /dev/null +++ b/docs/_static/img/contributing-cov-partial.svg @@ -0,0 +1,268 @@ + + + + + + + + + + + + + + 1001 + + + + + + + + + + + + + + + + + + + + url_part + + + + + = + + + + + request + + + + + . + + + + + rel_url + + + + + . + + + + + raw_path + + + + + + + + + + 15 + + + + + + + + + + + + + + + + + 1002 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + while + + + + + url_part + + + + + : + + + + + + + + + ! + + + + + + + + + + + + + + + + + + + + + + + 1003 + + + + + + + + + + + + + + + + + + + + + + + + + for + + + + + candidate + + + + + in + + + + + resource_index + + + + + . + + + + + get + + + + + ( + + + + + url_part + + + + + , + + + + + + + + + + ( + + + + + ) + + + + + ) + + + + + : + + + + + + + + + + + 15 + + + + + + + diff --git a/docs/contributing.rst b/docs/contributing.rst index 5263f4a3f47..84d223d0e0b 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -============ +************ (:doc:`contributing-admins`) Instructions for contributors ------------------------------ +============================= In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite --------------------------------------------- +============================================ We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter ------------------ +================= The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite ----------------------- +====================== After all the preconditions are met you can run tests typing the next command: @@ -158,35 +158,75 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions -Tests coverage --------------- +Code coverage +============= -We are trying hard to have good test coverage; please don't make it worse. +We use *codecov.io* as an indispensable tool for analyzing our coverage +results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage +reports for the master branch, history, pull requests etc. -Use: +We'll use an example from a real PR to demonstrate how we use this. +Once the tests run in a PR, you'll see a comment posted by *codecov*. +The most important thing to check here is whether there are any new +missed or partial lines in the report: -.. code-block:: shell +.. image:: _static/img/contributing-cov-comment.svg + +Here, the PR has introduced 1 miss and 2 partials. Now we +click the link in the comment header to open the full report: + +.. image:: _static/img/contributing-cov-header.svg + :alt: Codecov report + +Now, if we look through the diff under 'Files changed' we find one of +our partials: - $ make cov-dev +.. image:: _static/img/contributing-cov-partial.svg + :alt: A while loop with partial coverage. 
-to run test suite and collect coverage information. Once the command -has finished check your coverage at the file that appears in the last -line of the output: -``open file:///.../aiohttp/htmlcov/index.html`` +In this case, the while loop is never skipped in our tests. This is +probably not worth writing a test for (and may be a situation that is +impossible to trigger anyway), so we leave this alone. -Please go to the link and make sure that your code change is covered. +We're still missing a partial and a miss, so we switch to the +'Indirect changes' tab and take a look through the diff there. This +time we find the remaining 2 lines: +.. image:: _static/img/contributing-cov-miss.svg + :alt: An if statement that isn't covered anymore. -The project uses *codecov.io* for storing coverage results. Visit -https://codecov.io/gh/aio-libs/aiohttp for looking on coverage of -master branch, history, pull requests etc. +After reviewing the PR, we find that this code is no longer needed as +the changes mean that this method will never be called under those +conditions. Thanks to this report, we were able to remove some +redundant code from a performance-critical part of our codebase (this +check would have been run, probably multiple times, for every single +incoming request). + +.. tip:: + Sometimes the diff on *codecov.io* doesn't make sense. This is usually + caused by the branch being out of sync with master. Try merging + master into the branch and it will likely fix the issue. Failing + that, try checking coverage locally as described in the next section. + +Other tools +----------- The browser extension https://docs.codecov.io/docs/browser-extension -is highly recommended for analyzing the coverage just in *Files -Changed* tab on *GitHub Pull Request* review page. +is also a useful tool for analyzing the coverage directly from *Files +Changed* tab on the *GitHub Pull Request* review page. 
+ + +You can also produce coverage reports locally with ``make cov-dev`` +or just adding ``--cov-report=html`` to ``pytest``. + +This will run the test suite and collect coverage information. Once +finished, coverage results can be view by opening: +```console +$ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html +``` Documentation -------------- +============= We encourage documentation improvements. @@ -202,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking --------------- +============== We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -220,47 +260,19 @@ To run spell checker on Linux box you should install it first: $ sudo apt-get install enchant $ pip install sphinxcontrib-spelling -Changelog update ----------------- - -The ``CHANGES.rst`` file is managed using `towncrier -`_ tool and all non trivial -changes must be accompanied by a news entry. -To add an entry to the news file, first you need to have created an -issue describing the change you want to make. A Pull Request itself -*may* function as such, but it is preferred to have a dedicated issue -(for example, in case the PR ends up rejected due to code quality -reasons). - -Once you have an issue or pull request, you take the number and you -create a file inside of the ``CHANGES/`` directory named after that -issue number with an extension of ``.removal``, ``.feature``, -``.bugfix``, or ``.doc``. Thus if your issue or PR number is ``1234`` and -this change is fixing a bug, then you would create a file -``CHANGES/1234.bugfix``. PRs can span multiple categories by creating -multiple files (for instance, if you added a feature and -deprecated/removed the old feature at the same time, you would create -``CHANGES/NNNN.feature`` and ``CHANGES/NNNN.removal``). 
Likewise if a PR touches -multiple issues/PRs you may create a file for each of them with the -exact same contents and *Towncrier* will deduplicate them. - -The contents of this file are *reStructuredText* formatted text that -will be used as the content of the news file entry. You do not need to -reference the issue or PR numbers here as *towncrier* will automatically -add a reference to all of the affected issues when rendering the news -file. +.. include:: ../CHANGES/README.rst Making a Pull Request ---------------------- +===================== After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting ------------ +=========== All Pull Requests are created against *master* git branch. @@ -301,7 +313,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer ----------------------------------- +================================== Contribute! diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 768d52cfd05..514477e8fcb 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -67,6 +67,7 @@ CIMultiDict ClientSession cls cmd +codebase codec Codings committer From d33bc21414e283c9e6fe7f6caf69e2ed60d66c82 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 17:09:58 +0000 Subject: [PATCH 044/144] Improve validation in HTTP parser (#8074) (#8078) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Paul J. 
Dorn Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) (cherry picked from commit 33ccdfb0a12690af5bb49bda2319ec0907fa7827) --- CHANGES/8074.bugfix.rst | 5 ++ CONTRIBUTORS.txt | 1 + aiohttp/http_parser.py | 32 +++++---- tests/test_http_parser.py | 139 +++++++++++++++++++++++++++++++++++++- 4 files changed, 160 insertions(+), 17 deletions(-) create mode 100644 CHANGES/8074.bugfix.rst diff --git a/CHANGES/8074.bugfix.rst b/CHANGES/8074.bugfix.rst new file mode 100644 index 00000000000..16c71445476 --- /dev/null +++ b/CHANGES/8074.bugfix.rst @@ -0,0 +1,5 @@ +Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + +Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. Invalid header field names containing question mark or slash are now rejected. Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + +(BACKWARD INCOMPATIBLE) diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index e94381dcf28..3062bf5811e 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -255,6 +255,7 @@ Pankaj Pandey Parag Jain Pau Freixes Paul Colomiets +Paul J. Dorn Paulius Šileikis Paulus Schoutsen Pavel Kamaev diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 85499177701..1877f558308 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -69,12 +69,11 @@ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." 
/ # "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA # token = 1*tchar -METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+") -VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d).(\d)") -HDRRE: Final[Pattern[bytes]] = re.compile( - rb"[\x00-\x1F\x7F-\xFF()<>@,;:\[\]={} \t\"\\]" -) -HEXDIGIT = re.compile(rb"[0-9a-fA-F]+") +_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") +TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) +DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) +HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") class RawRequestMessage(NamedTuple): @@ -136,6 +135,7 @@ def parse_headers( self, lines: List[bytes] ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: headers: CIMultiDict[str] = CIMultiDict() + # note: "raw" does not mean inclusion of OWS before/after the field value raw_headers = [] lines_idx = 1 @@ -149,13 +149,14 @@ def parse_headers( except ValueError: raise InvalidHeader(line) from None + if len(bname) == 0: + raise InvalidHeader(bname) + # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} raise InvalidHeader(line) bvalue = bvalue.lstrip(b" \t") - if HDRRE.search(bname): - raise InvalidHeader(bname) if len(bname) > self.max_field_size: raise LineTooLong( "request header name {}".format( @@ -164,6 +165,9 @@ def parse_headers( str(self.max_field_size), str(len(bname)), ) + name = bname.decode("utf-8", "surrogateescape") + if not TOKENRE.fullmatch(name): + raise InvalidHeader(bname) header_length = len(bvalue) @@ -210,7 +214,6 @@ def parse_headers( ) bvalue = bvalue.strip(b" \t") - name = bname.decode("utf-8", "surrogateescape") value = bvalue.decode("utf-8", "surrogateescape") # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 @@ -338,7 +341,8 @@ def get_content_length() -> Optional[int]: # Shouldn't allow +/- or other number formats. 
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 - if not length_hdr.strip(" \t").isdecimal(): + # msg.headers is already stripped of leading/trailing wsp + if not DIGITS.fullmatch(length_hdr): raise InvalidHeader(CONTENT_LENGTH) return int(length_hdr) @@ -566,7 +570,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: ) # method - if not METHRE.fullmatch(method): + if not TOKENRE.fullmatch(method): raise BadStatusLine(method) # version @@ -683,8 +687,8 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: raise BadStatusLine(line) version_o = HttpVersion(int(match.group(1)), int(match.group(2))) - # The status code is a three-digit number - if len(status) != 3 or not status.isdecimal(): + # The status code is a three-digit ASCII number, no padding + if len(status) != 3 or not DIGITS.fullmatch(status): raise BadStatusLine(line) status_i = int(status) @@ -826,7 +830,7 @@ def feed_data( if self._lax: # Allow whitespace in lax mode. size_b = size_b.strip() - if not re.fullmatch(HEXDIGIT, size_b): + if not re.fullmatch(HEXDIGITS, size_b): exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 820a76cb821..b931730529d 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -2,7 +2,8 @@ import asyncio import re -from typing import Any, List +from contextlib import nullcontext +from typing import Any, Dict, List from unittest import mock from urllib.parse import quote @@ -169,11 +170,27 @@ def test_cve_2023_37276(parser: Any) -> None: parser.feed_data(text) +@pytest.mark.parametrize( + "rfc9110_5_6_2_token_delim", + r'"(),/:;<=>?@[\]{}', +) +def test_bad_header_name(parser: Any, rfc9110_5_6_2_token_delim: str) -> None: + text = f"POST / HTTP/1.1\r\nhead{rfc9110_5_6_2_token_delim}er: val\r\n\r\n".encode() + expectation = pytest.raises(http_exceptions.BadHttpMessage) + if rfc9110_5_6_2_token_delim == ":": + # Inserting 
colon into header just splits name/value earlier. + expectation = nullcontext() + with expectation: + parser.feed_data(text) + + @pytest.mark.parametrize( "hdr", ( "Content-Length: -5", # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length "Content-Length: +256", + "Content-Length: \N{superscript one}", + "Content-Length: \N{mathematical double-struck digit one}", "Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 "Bar: abc\ndef", "Baz: abc\x00def", @@ -266,6 +283,20 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) +def test_parse_unusual_request_line(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. May match C behaviour later.") + text = b"#smol //a HTTP/1.3\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + assert len(messages) == 1 + msg, _ = messages[0] + assert msg.compression is None + assert not msg.upgrade + assert msg.method == "#smol" + assert msg.path == "//a" + assert msg.version == (1, 3) + + def test_parse(parser) -> None: text = b"GET /test HTTP/1.1\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) @@ -568,6 +599,43 @@ def test_headers_content_length_err_2(parser) -> None: parser.feed_data(text) +_pad: Dict[bytes, str] = { + b"": "empty", + # not a typo. 
Python likes triple zero + b"\000": "NUL", + b" ": "SP", + b" ": "SPSP", + # not a typo: both 0xa0 and 0x0a in case of 8-bit fun + b"\n": "LF", + b"\xa0": "NBSP", + b"\t ": "TABSP", +} + + +@pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) +@pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) +@pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) +def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: + text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) + expectation = pytest.raises(http_exceptions.BadHttpMessage) + if pad1 == pad2 == b"" and hdr != b"": + # one entry in param matrix is correct: non-empty name, not padded + expectation = nullcontext() + if pad1 == pad2 == hdr == b"": + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. May match C behaviour later.") + with expectation: + parser.feed_data(text) + + +def test_empty_header_name(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") + text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + def test_invalid_header(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"test line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): @@ -690,6 +758,34 @@ def test_http_request_bad_status_line(parser) -> None: assert r"\n" not in exc_info.value.message +_num: Dict[bytes, str] = { + # dangerous: accepted by Python int() + # unicodedata.category("\U0001D7D9") == 'Nd' + "\N{mathematical double-struck digit one}".encode(): "utf8digit", + # only added for interop tests, refused by Python int() + # unicodedata.category("\U000000B9") == 'No' + "\N{superscript one}".encode(): "utf8number", + "\N{superscript one}".encode("latin-1"): "latin1number", +} + + +@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values()) +def test_http_request_bad_status_line_number( + parser: Any, nonascii_digit: bytes +) -> None: + text = b"GET /digit HTTP/1." + nonascii_digit + b"\r\n\r\n" + with pytest.raises(http_exceptions.BadStatusLine): + parser.feed_data(text) + + +def test_http_request_bad_status_line_separator(parser: Any) -> None: + # single code point, old, multibyte NFKC, multibyte NFKD + utf8sep = "\N{arabic ligature sallallahou alayhe wasallam}".encode() + text = b"GET /ligature HTTP/1" + utf8sep + b"1\r\n\r\n" + with pytest.raises(http_exceptions.BadStatusLine): + parser.feed_data(text) + + def test_http_request_bad_status_line_whitespace(parser: Any) -> None: text = b"GET\n/path\fHTTP/1.1\r\n\r\n" with pytest.raises(http_exceptions.BadStatusLine): @@ -711,6 +807,31 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +def test_http_request_parser_utf8_request_line(parser) -> None: + if not isinstance(response, HttpResponseParserPy): + pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") + messages, upgrade, tail = parser.feed_data( + # note the truncated unicode sequence + b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" + + # for easier grep: ASCII 0xA0 more commonly known as non-breaking space + # note the leading and trailing spaces + "sTeP: \N{latin small letter sharp s}nek\t\N{no-break space} " + "\r\n\r\n".encode() + ) + msg = messages[0][0] + + assert msg.method == "GET" + assert msg.path == "/Pünktchen\udca0\udcef\udcb7" + assert msg.version == (1, 1) + assert msg.headers == CIMultiDict([("STEP", "ßnek\t\xa0")]) + assert msg.raw_headers == ((b"sTeP", "ßnek\t\xa0".encode()),) + assert not msg.should_close + assert msg.compression is None + assert not msg.upgrade + assert not msg.chunked + assert msg.url.path == URL("/P%C3%BCnktchen\udca0\udcef\udcb7").path + + def test_http_request_parser_utf8(parser) -> None: text = "GET /path HTTP/1.1\r\nx-test:тест\r\n\r\n".encode() messages, upgrade, tail = parser.feed_data(text) @@ -760,9 +881,15 @@ def test_http_request_parser_two_slashes(parser) -> None: assert not msg.chunked -def test_http_request_parser_bad_method(parser) -> None: +@pytest.mark.parametrize( + "rfc9110_5_6_2_token_delim", + [bytes([i]) for i in rb'"(),/:;<=>?@[\]{}'], +) +def test_http_request_parser_bad_method( + parser, rfc9110_5_6_2_token_delim: bytes +) -> None: with pytest.raises(http_exceptions.BadStatusLine): - parser.feed_data(b'G=":<>(e),[T];?" 
/get HTTP/1.1\r\n\r\n') + parser.feed_data(rfc9110_5_6_2_token_delim + b'ET" /get HTTP/1.1\r\n\r\n') def test_http_request_parser_bad_version(parser) -> None: @@ -974,6 +1101,12 @@ def test_http_response_parser_code_not_int(response) -> None: response.feed_data(b"HTTP/1.1 ttt test\r\n\r\n") +@pytest.mark.parametrize("nonascii_digit", _num.keys(), ids=_num.values()) +def test_http_response_parser_code_not_ascii(response, nonascii_digit: bytes) -> None: + with pytest.raises(http_exceptions.BadStatusLine): + response.feed_data(b"HTTP/1.1 20" + nonascii_digit + b" test\r\n\r\n") + + def test_http_request_chunked_payload(parser) -> None: text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] From 435ad46e6c26cbf6ed9a38764e9ba8e7441a0e3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 18:22:12 +0100 Subject: [PATCH 045/144] [PR #3955/8960063e backport][3.9] Replace all tmpdir fixtures with tmp_path (#3551) (#8076) **This is a backport of PR #3955 as merged into master (8960063ef4137d6c547a687a45ed55b943e9b8d1).** tmp_path is the replacement fixture in pytest for tmpdir; tmp_path uses the builtin pathlib.Path class. As it says on the tin, this commit replaces every instance of tmpdir in the test suite with tmp_path. Aside from s/tmpdir/tmp_path/ this also required changing instances of `tmpdir.join(foo)` to `tmp_path / foo`. This is intended to comprehensively address and close #3551, and should have no side effects. This does not affect end users. 
Co-authored-by: Matt VanEseltine --- CHANGES/3551.misc | 1 + CONTRIBUTORS.txt | 1 + tests/test_client_request.py | 4 ++-- tests/test_proxy_functional.py | 12 ++++++------ tests/test_web_functional.py | 6 +++--- tests/test_web_sendfile_functional.py | 22 +++++++++++----------- tests/test_web_urldispatcher.py | 4 ++-- 7 files changed, 26 insertions(+), 24 deletions(-) create mode 100644 CHANGES/3551.misc diff --git a/CHANGES/3551.misc b/CHANGES/3551.misc new file mode 100644 index 00000000000..63965c14821 --- /dev/null +++ b/CHANGES/3551.misc @@ -0,0 +1 @@ +Replace all tmpdir fixtures with tmp_path in test suite. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 3062bf5811e..64b6f575f26 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -225,6 +225,7 @@ Martin Richard Martin Sucha Mathias Fröjdman Mathieu Dugré +Matt VanEseltine Matthias Marquardt Matthieu Hauglustaine Matthieu Rigal diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 6521b70ad55..f8107ffad88 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -762,8 +762,8 @@ async def test_pass_falsy_data(loop) -> None: await req.close() -async def test_pass_falsy_data_file(loop, tmpdir) -> None: - testfile = tmpdir.join("tmpfile").open("w+b") +async def test_pass_falsy_data_file(loop, tmp_path) -> None: + testfile = (tmp_path / "tmpfile").open("w+b") testfile.write(b"data") testfile.seek(0) skip = frozenset([hdrs.CONTENT_TYPE]) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index de5eeb258ff..f199404f159 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -721,12 +721,12 @@ async def test_proxy_from_env_http_with_auth(proxy_test_server, get_request, moc async def test_proxy_from_env_http_with_auth_from_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = 
aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" netrc_file_data = "machine 127.0.0.1 login {} password {}".format( auth.login, auth.password, @@ -747,12 +747,12 @@ async def test_proxy_from_env_http_with_auth_from_netrc( async def test_proxy_from_env_http_without_auth_from_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" netrc_file_data = "machine 127.0.0.2 login {} password {}".format( auth.login, auth.password, @@ -773,12 +773,12 @@ async def test_proxy_from_env_http_without_auth_from_netrc( async def test_proxy_from_env_http_without_auth_from_wrong_netrc( - proxy_test_server, get_request, tmpdir, mocker + proxy_test_server, get_request, tmp_path, mocker ): url = "http://aiohttp.io/path" proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") - netrc_file = tmpdir.join("test_netrc") + netrc_file = tmp_path / "test_netrc" invalid_data = f"machine 127.0.0.1 {auth.login} pass {auth.password}" with open(str(netrc_file), "w") as f: f.write(invalid_data) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 28d97d9694c..04fc2e35fd1 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -1786,7 +1786,7 @@ async def handler(request): await resp.release() -async def test_response_with_bodypart_named(aiohttp_client, tmpdir) -> None: +async def test_response_with_bodypart_named(aiohttp_client, tmp_path) -> None: async def handler(request): reader = await request.multipart() part = await reader.next() @@ -1796,9 +1796,9 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - f = tmpdir.join("foobar.txt") + f = tmp_path / "foobar.txt" f.write_text("test", 
encoding="utf8") - with open(str(f), "rb") as fd: + with f.open("rb") as fd: data = {"file": fd} resp = await client.post("/", data=data) diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 31f22892f66..b044f29bc81 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -558,18 +558,18 @@ def test_static_route_path_existence_check() -> None: web.StaticResource("/", nodirectory) -async def test_static_file_huge(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 20MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) - file_st = os.stat(str(tmpdir.join(filename))) + file_st = os.stat(str(tmp_path / filename)) app = web.Application() - app.router.add_static("/static", str(tmpdir)) + app.router.add_static("/static", str(tmp_path)) client = await aiohttp_client(app) resp = await client.get("/static/" + filename) @@ -579,7 +579,7 @@ async def test_static_file_huge(aiohttp_client, tmpdir) -> None: assert resp.headers.get("CONTENT-ENCODING") is None assert int(resp.headers.get("CONTENT-LENGTH")) == file_st.st_size - f = tmpdir.join(filename).open("rb") + f = (tmp_path / filename).open("rb") off = 0 cnt = 0 while off < file_st.st_size: @@ -988,11 +988,11 @@ async def handler(request): await client.close() -async def test_static_file_huge_cancel(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge_cancel(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 100MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) @@ -1005,7 +1005,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") 
sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmpdir.join(filename)))) + ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) return ret app = web.Application() @@ -1029,11 +1029,11 @@ async def handler(request): await client.close() -async def test_static_file_huge_error(aiohttp_client, tmpdir) -> None: +async def test_static_file_huge_error(aiohttp_client, tmp_path) -> None: filename = "huge_data.unknown_mime_type" # fill 20MB file - with tmpdir.join(filename).open("wb") as f: + with (tmp_path / filename).open("wb") as f: f.seek(20 * 1024 * 1024) f.write(b"1") @@ -1042,7 +1042,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmpdir.join(filename)))) + ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) return ret app = web.Application() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 8ca8dcd7b99..772eb92c244 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -526,13 +526,13 @@ async def post(self) -> web.Response: async def test_static_absolute_url( - aiohttp_client: AiohttpClient, tmpdir: pathlib.Path + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path ) -> None: # requested url is an absolute name like # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different app = web.Application() - fname = tmpdir / "file.txt" + fname = tmp_path / "file.txt" fname.write_text("sample text", "ascii") here = pathlib.Path(__file__).parent app.router.add_static("/static", here) From 9118a5831e8a65b8c839eb7e4ac983e040ff41df Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 28 Jan 2024 18:38:58 +0000 Subject: [PATCH 046/144] [PR #8079/1c335944 backport][3.9] Validate static paths (#8080) **This is a 
backport of PR #8079 as merged into master (1c335944d6a8b1298baf179b7c0b3069f10c514b).** --- CHANGES/8079.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 18 +++++-- docs/web_advanced.rst | 16 ++++-- docs/web_reference.rst | 12 +++-- tests/test_web_urldispatcher.py | 91 +++++++++++++++++++++++++++++++++ 5 files changed, 128 insertions(+), 10 deletions(-) create mode 100644 CHANGES/8079.bugfix.rst diff --git a/CHANGES/8079.bugfix.rst b/CHANGES/8079.bugfix.rst new file mode 100644 index 00000000000..57bc8bfebcc --- /dev/null +++ b/CHANGES/8079.bugfix.rst @@ -0,0 +1 @@ +Improved validation of paths for static resources -- by :user:`bdraco`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 0334c8c9b1e..99696533444 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -595,9 +595,14 @@ def url_for( # type: ignore[override] url = url / filename if append_version: + unresolved_path = self._directory.joinpath(filename) try: - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() filepath.relative_to(self._directory) except (ValueError, FileNotFoundError): # ValueError for case when path point to symlink @@ -662,8 +667,13 @@ async def _handle(self, request: Request) -> StreamResponse: # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different raise HTTPForbidden() - filepath = self._directory.joinpath(filename).resolve() - if not self._follow_symlinks: + unresolved_path = self._directory.joinpath(filename) + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() 
filepath.relative_to(self._directory) except (ValueError, FileNotFoundError) as error: # relatively safe diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 33c2ebf0736..3549a5c7e36 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -263,12 +263,22 @@ instead could be enabled with ``show_index`` parameter set to ``True``:: web.static('/prefix', path_to_static_folder, show_index=True) -When a symlink from the static directory is accessed, the server responses to -client with ``HTTP/404 Not Found`` by default. To allow the server to follow -symlinks, parameter ``follow_symlinks`` should be set to ``True``:: +When a symlink that leads outside the static directory is accessed, the server +responds to the client with ``HTTP/404 Not Found`` by default. To allow the server to +follow symlinks that lead outside the static root, the parameter ``follow_symlinks`` +should be set to ``True``:: web.static('/prefix', path_to_static_folder, follow_symlinks=True) +.. caution:: + + Enabling ``follow_symlinks`` can be a security risk, and may lead to + a directory transversal attack. You do NOT need this option to follow symlinks + which point to somewhere else within the static directory, this option is only + used to break out of the security sandbox. Enabling this option is highly + discouraged, and only expected to be used for edge cases in a local + development setting where remote users do not have access to the server. + When you want to enable cache busting, parameter ``append_version`` can be set to ``True`` diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 1d92678a083..aedac0e54d1 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1875,9 +1875,15 @@ Application and Router by default it's not allowed and HTTP/403 will be returned on directory access. - :param bool follow_symlinks: flag for allowing to follow symlinks from - a directory, by default it's not allowed and - HTTP/404 will be returned on access. 
+ :param bool follow_symlinks: flag for allowing to follow symlinks that lead + outside the static root directory, by default it's not allowed and + HTTP/404 will be returned on access. Enabling ``follow_symlinks`` + can be a security risk, and may lead to a directory transversal attack. + You do NOT need this option to follow symlinks which point to somewhere + else within the static directory, this option is only used to break out + of the security sandbox. Enabling this option is highly discouraged, + and only expected to be used for edge cases in a local development + setting where remote users do not have access to the server. :param bool append_version: flag for adding file version (hash) to the url query string, this value will diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 772eb92c244..aee3ecd5c24 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -130,6 +130,97 @@ async def test_follow_symlink( assert (await r.text()) == data +async def test_follow_symlink_directory_traversal( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + # Tests that follow_symlinks does not allow directory transversal + data = "private" + + private_file = tmp_path / "private_file" + private_file.write_text(data) + + safe_path = tmp_path / "safe_dir" + safe_path.mkdir() + + app = web.Application() + + # Register global static route: + app.router.add_static("/", str(safe_path), follow_symlinks=True) + client = await aiohttp_client(app) + + await client.start_server() + # We need to use a raw socket to test this, as the client will normalize + # the path before sending it to the server. 
+ reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /../private_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"404 Not Found" in response + writer.close() + await writer.wait_closed() + await client.close() + + +async def test_follow_symlink_directory_traversal_after_normalization( + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient +) -> None: + # Tests that follow_symlinks does not allow directory transversal + # after normalization + # + # Directory structure + # |-- secret_dir + # | |-- private_file (should never be accessible) + # | |-- symlink_target_dir + # | |-- symlink_target_file (should be accessible via the my_symlink symlink) + # | |-- sandbox_dir + # | |-- my_symlink -> symlink_target_dir + # + secret_path = tmp_path / "secret_dir" + secret_path.mkdir() + + # This file is below the symlink target and should not be reachable + private_file = secret_path / "private_file" + private_file.write_text("private") + + symlink_target_path = secret_path / "symlink_target_dir" + symlink_target_path.mkdir() + + sandbox_path = symlink_target_path / "sandbox_dir" + sandbox_path.mkdir() + + # This file should be reachable via the symlink + symlink_target_file = symlink_target_path / "symlink_target_file" + symlink_target_file.write_text("readable") + + my_symlink_path = sandbox_path / "my_symlink" + pathlib.Path(str(my_symlink_path)).symlink_to(str(symlink_target_path), True) + + app = web.Application() + + # Register global static route: + app.router.add_static("/", str(sandbox_path), follow_symlinks=True) + client = await aiohttp_client(app) + + await client.start_server() + # We need to use a raw socket to test this, as the client will normalize + # the path before sending it to the server. 
+ reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /my_symlink/../private_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"404 Not Found" in response + writer.close() + await writer.wait_closed() + + reader, writer = await asyncio.open_connection(client.host, client.port) + writer.write(b"GET /my_symlink/symlink_target_file HTTP/1.1\r\n\r\n") + response = await reader.readuntil(b"\r\n\r\n") + assert b"200 OK" in response + response = await reader.readuntil(b"readable") + assert response == b"readable" + writer.close() + await writer.wait_closed() + await client.close() + + @pytest.mark.parametrize( "dir_name,filename,data", [ From 24a6d64966d99182e95f5d3a29541ef2fec397ad Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 20:38:09 +0000 Subject: [PATCH 047/144] Release v3.9.2 (#8082) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES.rst | 196 +++++++++++++++++++++++++++++++++++++ CHANGES/3551.misc | 1 - CHANGES/7698.feature | 1 - CHANGES/7916.doc | 1 - CHANGES/7978.bugfix | 1 - CHANGES/7995.doc | 1 - CHANGES/8010.doc | 2 - CHANGES/8012.bugfix | 1 - CHANGES/8014.bugfix | 1 - CHANGES/8021.bugfix | 1 - CHANGES/8066.contrib.rst | 21 ---- CHANGES/8066.packaging.rst | 1 - CHANGES/8067.doc.rst | 3 - CHANGES/8074.bugfix.rst | 5 - CHANGES/8079.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 16 files changed, 197 insertions(+), 42 deletions(-) delete mode 100644 CHANGES/3551.misc delete mode 100644 CHANGES/7698.feature delete mode 100644 CHANGES/7916.doc delete mode 100644 CHANGES/7978.bugfix delete mode 100644 CHANGES/7995.doc delete mode 100644 CHANGES/8010.doc delete mode 100644 CHANGES/8012.bugfix delete mode 100644 CHANGES/8014.bugfix delete mode 100644 CHANGES/8021.bugfix delete mode 100644 CHANGES/8066.contrib.rst delete mode 120000 CHANGES/8066.packaging.rst delete mode 
100644 CHANGES/8067.doc.rst delete mode 100644 CHANGES/8074.bugfix.rst delete mode 100644 CHANGES/8079.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 8c2a2707408..c26cc90e76e 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,202 @@ .. towncrier release notes start +3.9.2 (2024-01-28) +================== + +Bug fixes +--------- + +- Fixed server-side websocket connection leak. + + + *Related issues and pull requests on GitHub:* + :issue:`7978`. + + + +- Fixed ``web.FileResponse`` doing blocking I/O in the event loop. + + + *Related issues and pull requests on GitHub:* + :issue:`8012`. + + + +- Fixed double compress when compression enabled and compressed file exists in server file responses. + + + *Related issues and pull requests on GitHub:* + :issue:`8014`. + + + +- Added runtime type check for ``ClientSession`` ``timeout`` parameter. + + + *Related issues and pull requests on GitHub:* + :issue:`8021`. + + + +- Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. + Invalid header field names containing question mark or slash are now rejected. + Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + + + *Related issues and pull requests on GitHub:* + :issue:`8074`. + + + +- Improved validation of paths for static resources requests to the server -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8079`. + + + + +Features +-------- + +- Added support for passing :py:data:`True` to ``ssl`` parameter in ``ClientSession`` while + deprecating :py:data:`None` -- by :user:`xiangyan99`. + + + *Related issues and pull requests on GitHub:* + :issue:`7698`. 
+ + + +Breaking changes +---------------- + +- Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. + Invalid header field names containing question mark or slash are now rejected. + Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. + + + *Related issues and pull requests on GitHub:* + :issue:`8074`. + + + + +Improved documentation +---------------------- + +- Fixed examples of ``fallback_charset_resolver`` function in the :doc:`client_advanced` document. -- by :user:`henry0312`. + + + *Related issues and pull requests on GitHub:* + :issue:`7995`. + + + +- The Sphinx setup was updated to avoid showing the empty + changelog draft section in the tagged release documentation + builds on Read The Docs -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8067`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- The changelog categorization was made clearer. The + contributors can now mark their fragment files more + accurately -- by :user:`webknjaz`. + + The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` + + + *Related issues and pull requests on GitHub:* + :issue:`8066`. + + + + +Contributor-facing changes +-------------------------- + +- Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`7916`. + + + +- The changelog categorization was made clearer. The + contributors can now mark their fragment files more + accurately -- by :user:`webknjaz`. 
+ + The new category tags are: + + * ``bugfix`` + + * ``feature`` + + * ``deprecation`` + + * ``breaking`` (previously, ``removal``) + + * ``doc`` + + * ``packaging`` + + * ``contrib`` + + * ``misc`` + + + *Related issues and pull requests on GitHub:* + :issue:`8066`. + + + + +Miscellaneous internal changes +------------------------------ + +- Replaced all ``tmpdir`` fixtures with ``tmp_path`` in test suite. + + + *Related issues and pull requests on GitHub:* + :issue:`3551`. + + + + +---- + + 3.9.1 (2023-11-26) ================== diff --git a/CHANGES/3551.misc b/CHANGES/3551.misc deleted file mode 100644 index 63965c14821..00000000000 --- a/CHANGES/3551.misc +++ /dev/null @@ -1 +0,0 @@ -Replace all tmpdir fixtures with tmp_path in test suite. diff --git a/CHANGES/7698.feature b/CHANGES/7698.feature deleted file mode 100644 index e8c4b3fb452..00000000000 --- a/CHANGES/7698.feature +++ /dev/null @@ -1 +0,0 @@ -Added support for passing `True` to `ssl` while deprecating `None`. -- by :user:`xiangyan99` diff --git a/CHANGES/7916.doc b/CHANGES/7916.doc deleted file mode 100644 index b616ae85bbe..00000000000 --- a/CHANGES/7916.doc +++ /dev/null @@ -1 +0,0 @@ -Updated :ref:`contributing/Tests coverage ` section to show how we use ``codecov`` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/7978.bugfix b/CHANGES/7978.bugfix deleted file mode 100644 index 3c7dc096ca7..00000000000 --- a/CHANGES/7978.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix websocket connection leak diff --git a/CHANGES/7995.doc b/CHANGES/7995.doc deleted file mode 100644 index 70e3dfa5469..00000000000 --- a/CHANGES/7995.doc +++ /dev/null @@ -1 +0,0 @@ -Fix examples of `fallback_charset_resolver` function in client_advanced documentation. -- by :user:`henry0312` diff --git a/CHANGES/8010.doc b/CHANGES/8010.doc deleted file mode 100644 index db1b0aa3225..00000000000 --- a/CHANGES/8010.doc +++ /dev/null @@ -1,2 +0,0 @@ -On the `CHANGES/README.rst `_ page, -a link to the ``Towncrier philosophy`` has been fixed. 
diff --git a/CHANGES/8012.bugfix b/CHANGES/8012.bugfix deleted file mode 100644 index f5187075f3f..00000000000 --- a/CHANGES/8012.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix `web.FileResponse` doing blocking I/O in the event loop diff --git a/CHANGES/8014.bugfix b/CHANGES/8014.bugfix deleted file mode 100644 index 681bb5966ae..00000000000 --- a/CHANGES/8014.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix double compress when compression enabled and compressed file exists diff --git a/CHANGES/8021.bugfix b/CHANGES/8021.bugfix deleted file mode 100644 index f43843a587f..00000000000 --- a/CHANGES/8021.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add runtime type check for ``ClientSession`` ``timeout`` parameter. diff --git a/CHANGES/8066.contrib.rst b/CHANGES/8066.contrib.rst deleted file mode 100644 index 2468018e99b..00000000000 --- a/CHANGES/8066.contrib.rst +++ /dev/null @@ -1,21 +0,0 @@ -The changelog categorization was made clearer. The -contributors can now mark their fragment files more -accurately -- by :user:`webknjaz`. - -The new category tags are: - - * ``bugfix`` - - * ``feature`` - - * ``deprecation`` - - * ``breaking`` (previously, ``removal``) - - * ``doc`` - - * ``packaging`` - - * ``contrib`` - - * ``misc`` diff --git a/CHANGES/8066.packaging.rst b/CHANGES/8066.packaging.rst deleted file mode 120000 index 57cdff225f5..00000000000 --- a/CHANGES/8066.packaging.rst +++ /dev/null @@ -1 +0,0 @@ -8066.contrib.rst \ No newline at end of file diff --git a/CHANGES/8067.doc.rst b/CHANGES/8067.doc.rst deleted file mode 100644 index 3206db9ae87..00000000000 --- a/CHANGES/8067.doc.rst +++ /dev/null @@ -1,3 +0,0 @@ -The Sphinx setup was updated to avoid showing the empty -changelog draft section in the tagged release documentation -builds on Read The Docs -- by :user:`webknjaz`. 
diff --git a/CHANGES/8074.bugfix.rst b/CHANGES/8074.bugfix.rst deleted file mode 100644 index 16c71445476..00000000000 --- a/CHANGES/8074.bugfix.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fixed an unhandled exception in the Python HTTP parser on header lines starting with a colon -- by :user:`pajod`. - -Invalid request lines with anything but a dot between the HTTP major and minor version are now rejected. Invalid header field names containing question mark or slash are now rejected. Such requests are incompatible with :rfc:`9110#section-5.6.2` and are not known to be of any legitimate use. - -(BACKWARD INCOMPATIBLE) diff --git a/CHANGES/8079.bugfix.rst b/CHANGES/8079.bugfix.rst deleted file mode 100644 index 57bc8bfebcc..00000000000 --- a/CHANGES/8079.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Improved validation of paths for static resources -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index c0fd9817546..70ddb359640 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.1.dev0" +__version__ = "3.9.2" from typing import TYPE_CHECKING, Tuple From 94462eea445d43bf574ca6321349f67219ce9cb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Sun, 28 Jan 2024 23:10:10 +0100 Subject: [PATCH 048/144] [PR #3957/79fe2045 backport][3.9] Improve test suite handling of paths, temp files (#8084) **This is a backport of PR #3957 as merged into master (79fe204522ecf91e9c1cf1a3547c03f821106a74).** * Improve test suite handling of paths, temp files This updates most uses of `os.path` to instead use `pathlib.Path`. Relatedly, and following up from #3955 (which replaced pytest's `tmpdir` fixture with `tmp_path`), this removes most ad-hoc tempfile creation in favor of the `tmp_path` fixture. 
Following conversion, unnecessary `os` and `tempfile` imports were removed. Most pathlib changes involve straightforward changes from `os` functions such as `os.mkdir` or `os.path.abspath` to their equivalent methods in `pathlib.Path`. Changing ad-hoc temporary path to `tmp_path` involved removing the `tmp_dir_path` fixture and replacing its functionality with `tmp_path` in `test_save_load` and `test_guess_filename_with_tempfile`. On `test_static_route_user_home` function: * I think that the intention of this test is to ensure that aiohttp correctly expands the home path if passed in a string. I refactored it to `pathlib.Path` and cut out duplication of `relative_to()` calls. But if it's not doing anything but expanding `~`, then it's testing the functionality of `pathlib.Path`, not aiohttp. On `unix_sockname` fixture: This fixture uses `tempfile.TemporaryDirectory`. Because it's a somewhat complicated fixture used across multiple test modules, I left it as-is for now. On `str(tmp_path)` and even `pathlib.Path(str(tmp_path))`: pytest uses `pathlib2` to provide `tmp_path` for Python 3.5 (only). This is mostly fine but it fails on a couple of corner cases, such as `os.symlink()` which blocks all but `str` and `PurePath` via isinstance type checking. In several cases, this requires conversion to string or conversion to string and then into `pathlib.Path` to maintain code compatibility. See: pytest-dev/pytest/issues/5017 * Correct test_guess_filename to use file object * Update symlink in tests; more guess_filename tests (cherry picked from commit 79fe204522ecf91e9c1cf1a3547c03f821106a74) ## What do these changes do? This updates most uses of `os.path` to instead use `pathlib.Path`. Relatedly, and following up from #3955 (which replaced pytest's `tmpdir` fixture with `tmp_path`), this removes most ad-hoc tempfile creation in favor of the `tmp_path` fixture. Following conversion, unnecessary `os` and `tempfile` imports were removed. 
Most pathlib changes involve straightforward changes from `os` functions such as `os.mkdir` or `os.path.abspath` to their equivalent methods in `pathlib.Path`. Changing ad-hoc temporary path to `tmp_path` involved removing the `tmp_dir_path` fixture and replacing its functionality with `tmp_path` in `test_save_load` and `test_guess_filename_with_tempfile`. On `test_static_route_user_home` function: * I think that the intention of this test is to ensure that aiohttp correctly expands the home path if passed in a string. I refactored it to `pathlib.Path` and cut out duplication of `relative_to()` calls. But if it's not doing anything but expanding `~`, then it's testing the functionality of `pathlib.Path`, not aiohttp. On `unix_sockname` fixture: This fixture uses `tempfile.TemporaryDirectory`. Because it's a somewhat complicated fixture used across multiple test modules, I left it as-is for now. On `str(tmp_path)` and even `pathlib.Path(str(tmp_path))`: pytest uses `pathlib2` to provide `tmp_path` for Python 3.5 (only). This is mostly fine but it fails on a couple of corner cases, such as `os.symlink()` which blocks all but `str` and `PurePath` via isinstance type checking. In several cases, this requires conversion to string or conversion to string and then into `pathlib.Path` to maintain code compatibility. See: pytest-dev/pytest/issues/5017 ## Are there changes in behavior for the user? These changes only affect the test suite and have no impact on the end user. ## Related issue number This is intended to address discussion following the simplistic changes from tmpdir to tmp_path of #3955. ## Checklist - [X] I think the code is well written - [X] Unit tests for the changes exist - [X] Documentation reflects the changes - [X] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [X] Add a new news fragment into the `CHANGES` folder * name it `.` for example (588.bugfix) * if you don't have an `issue_id` change it to the pr id after creating the pr * ensure type is one of the following: * `.feature`: Signifying a new feature. * `.bugfix`: Signifying a bug fix. * `.doc`: Signifying a documentation improvement. * `.removal`: Signifying a deprecation or removal of public API. * `.misc`: A ticket has been closed, but it is not of interest to users. * Make sure to use full sentences with correct case and punctuation, for example: "Fix issue with non-ascii contents in doctest text files." --------- Co-authored-by: Matt VanEseltine Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES/3957.misc | 1 + tests/test_client_request.py | 21 ++--- tests/test_cookiejar.py | 8 +- tests/test_helpers.py | 15 +++- tests/test_multipart.py | 7 +- tests/test_proxy_functional.py | 6 +- tests/test_urldispatch.py | 70 ++++++++-------- tests/test_web_sendfile_functional.py | 36 ++++---- tests/test_web_urldispatcher.py | 113 ++++++++++---------------- tools/check_changes.py | 2 +- 10 files changed, 130 insertions(+), 149 deletions(-) create mode 100644 CHANGES/3957.misc diff --git a/CHANGES/3957.misc b/CHANGES/3957.misc new file mode 100644 index 00000000000..b4f9f58edb9 --- /dev/null +++ b/CHANGES/3957.misc @@ -0,0 +1 @@ +Improve test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. 
diff --git a/tests/test_client_request.py b/tests/test_client_request.py index f8107ffad88..c54e1828e34 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -1,7 +1,7 @@ import asyncio import hashlib import io -import os.path +import pathlib import urllib.parse import zlib from http.cookies import BaseCookie, Morsel, SimpleCookie @@ -921,12 +921,11 @@ async def test_chunked_transfer_encoding(loop, conn) -> None: async def test_file_upload_not_chunked(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: req = ClientRequest("post", URL("http://python.org/"), data=f, loop=loop) assert not req.chunked - assert req.headers["CONTENT-LENGTH"] == str(os.path.getsize(fname)) + assert req.headers["CONTENT-LENGTH"] == str(file_path.stat().st_size) await req.close() @@ -947,19 +946,17 @@ async def test_precompressed_data_stays_intact(loop) -> None: async def test_file_upload_not_chunked_seek(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: f.seek(100) req = ClientRequest("post", URL("http://python.org/"), data=f, loop=loop) - assert req.headers["CONTENT-LENGTH"] == str(os.path.getsize(fname) - 100) + assert req.headers["CONTENT-LENGTH"] == str(file_path.stat().st_size - 100) await req.close() async def test_file_upload_force_chunked(loop) -> None: - here = os.path.dirname(__file__) - fname = os.path.join(here, "aiohttp.png") - with open(fname, "rb") as f: + file_path = pathlib.Path(__file__).parent / "aiohttp.png" + with file_path.open("rb") as f: req = ClientRequest( "post", URL("http://python.org/"), data=f, chunked=True, loop=loop ) diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 
cffca3a4b59..9c608959c39 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -1,9 +1,8 @@ import asyncio import datetime import itertools -import os +import pathlib import pickle -import tempfile import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from unittest import mock @@ -200,8 +199,8 @@ async def test_constructor_with_expired( assert jar._loop is loop -async def test_save_load(loop, cookies_to_send, cookies_to_receive) -> None: - file_path = tempfile.mkdtemp() + "/aiohttp.test.cookie" +async def test_save_load(tmp_path, loop, cookies_to_send, cookies_to_receive) -> None: + file_path = pathlib.Path(str(tmp_path)) / "aiohttp.test.cookie" # export cookie jar jar_save = CookieJar(loop=loop) @@ -215,7 +214,6 @@ async def test_save_load(loop, cookies_to_send, cookies_to_receive) -> None: for cookie in jar_load: jar_test[cookie.key] = cookie - os.unlink(file_path) assert jar_test == cookies_to_receive diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 390d2390065..b59528d3468 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -4,7 +4,6 @@ import gc import platform import sys -import tempfile import weakref from math import ceil, modf from pathlib import Path @@ -73,11 +72,21 @@ def test_parse_mimetype(mimetype, expected) -> None: # ------------------- guess_filename ---------------------------------- -def test_guess_filename_with_tempfile() -> None: - with tempfile.TemporaryFile() as fp: +def test_guess_filename_with_file_object(tmp_path) -> None: + file_path = tmp_path / "test_guess_filename" + with file_path.open("w+b") as fp: assert helpers.guess_filename(fp, "no-throw") is not None +def test_guess_filename_with_path(tmp_path) -> None: + file_path = tmp_path / "test_guess_filename" + assert helpers.guess_filename(file_path, "no-throw") is not None + + +def test_guess_filename_with_default() -> None: + assert helpers.guess_filename(None, "no-throw") == "no-throw" + + # ------------------- 
BasicAuth ----------------------------------- diff --git a/tests/test_multipart.py b/tests/test_multipart.py index c68ba2dd6ff..f9d130e7949 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -1,6 +1,7 @@ import asyncio import io import json +import pathlib import zlib from unittest import mock @@ -1270,7 +1271,7 @@ async def test_write_preserves_content_disposition(self, buf, stream) -> None: async def test_preserve_content_disposition_header(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, "rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, @@ -1297,7 +1298,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): async def test_set_content_disposition_override(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, "rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, @@ -1324,7 +1325,7 @@ async def test_set_content_disposition_override(self, buf, stream): async def test_reset_content_disposition_header(self, buf, stream): # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 - with open(__file__, "rb") as fobj: + with pathlib.Path(__file__).open("rb") as fobj: with aiohttp.MultipartWriter("form-data", boundary=":") as writer: part = writer.append( fobj, diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index f199404f159..099922ac77f 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -731,7 +731,7 @@ async def test_proxy_from_env_http_with_auth_from_netrc( auth.login, auth.password, ) - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( os.environ, 
{"http_proxy": str(proxy.url), "NETRC": str(netrc_file)} @@ -757,7 +757,7 @@ async def test_proxy_from_env_http_without_auth_from_netrc( auth.login, auth.password, ) - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( os.environ, {"http_proxy": str(proxy.url), "NETRC": str(netrc_file)} @@ -780,7 +780,7 @@ async def test_proxy_from_env_http_without_auth_from_wrong_netrc( auth = aiohttp.BasicAuth("user", "pass") netrc_file = tmp_path / "test_netrc" invalid_data = f"machine 127.0.0.1 {auth.login} pass {auth.password}" - with open(str(netrc_file), "w") as f: + with netrc_file.open("w") as f: f.write(invalid_data) mocker.patch.dict( diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 588daed8d40..7c4941f9b3c 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1,4 +1,3 @@ -import os import pathlib import re from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized @@ -49,7 +48,7 @@ def fill_routes(router): def go(): route1 = router.add_route("GET", "/plain", make_handler()) route2 = router.add_route("GET", "/variable/{name}", make_handler()) - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) return [route1, route2] + list(resource) return go @@ -342,7 +341,7 @@ def test_route_dynamic(router) -> None: def test_add_static(router) -> None: resource = router.add_static( - "/st", os.path.dirname(aiohttp.__file__), name="static" + "/st", pathlib.Path(aiohttp.__file__).parent, name="static" ) assert router["static"] is resource url = resource.url_for(filename="/dir/a.txt") @@ -351,7 +350,7 @@ def test_add_static(router) -> None: def test_add_static_append_version(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") 
url = resource.url_for(filename="/data.unknown_mime_type", append_version=True) expect_url = ( "/st/data.unknown_mime_type?" "v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" @@ -361,7 +360,7 @@ def test_add_static_append_version(router) -> None: def test_add_static_append_version_set_from_constructor(router) -> None: resource = router.add_static( - "/st", os.path.dirname(__file__), append_version=True, name="static" + "/st", pathlib.Path(__file__).parent, append_version=True, name="static" ) url = resource.url_for(filename="/data.unknown_mime_type") expect_url = ( @@ -372,7 +371,7 @@ def test_add_static_append_version_set_from_constructor(router) -> None: def test_add_static_append_version_override_constructor(router) -> None: resource = router.add_static( - "/st", os.path.dirname(__file__), append_version=True, name="static" + "/st", pathlib.Path(__file__).parent, append_version=True, name="static" ) url = resource.url_for(filename="/data.unknown_mime_type", append_version=False) expect_url = "/st/data.unknown_mime_type" @@ -380,7 +379,7 @@ def test_add_static_append_version_override_constructor(router) -> None: def test_add_static_append_version_filename_without_slash(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="data.unknown_mime_type", append_version=True) expect_url = ( "/st/data.unknown_mime_type?" 
"v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" @@ -389,27 +388,26 @@ def test_add_static_append_version_filename_without_slash(router) -> None: def test_add_static_append_version_non_exists_file(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="/non_exists_file", append_version=True) assert "/st/non_exists_file" == str(url) def test_add_static_append_version_non_exists_file_without_slash(router) -> None: - resource = router.add_static("/st", os.path.dirname(__file__), name="static") + resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="non_exists_file", append_version=True) assert "/st/non_exists_file" == str(url) -def test_add_static_append_version_follow_symlink(router, tmpdir) -> None: +def test_add_static_append_version_follow_symlink(router, tmp_path) -> None: # Tests the access to a symlink, in static folder with apeend_version - tmp_dir_path = str(tmpdir) - symlink_path = os.path.join(tmp_dir_path, "append_version_symlink") - symlink_target_path = os.path.dirname(__file__) - os.symlink(symlink_target_path, symlink_path, True) + symlink_path = tmp_path / "append_version_symlink" + symlink_target_path = pathlib.Path(__file__).parent + pathlib.Path(str(symlink_path)).symlink_to(str(symlink_target_path), True) # Register global static route: resource = router.add_static( - "/st", tmp_dir_path, follow_symlinks=True, append_version=True + "/st", str(tmp_path), follow_symlinks=True, append_version=True ) url = resource.url_for(filename="/append_version_symlink/data.unknown_mime_type") @@ -421,16 +419,16 @@ def test_add_static_append_version_follow_symlink(router, tmpdir) -> None: assert expect_url == str(url) -def test_add_static_append_version_not_follow_symlink(router, tmpdir) -> None: +def 
test_add_static_append_version_not_follow_symlink(router, tmp_path) -> None: # Tests the access to a symlink, in static folder with apeend_version - tmp_dir_path = str(tmpdir) - symlink_path = os.path.join(tmp_dir_path, "append_version_symlink") - symlink_target_path = os.path.dirname(__file__) - os.symlink(symlink_target_path, symlink_path, True) + symlink_path = tmp_path / "append_version_symlink" + symlink_target_path = pathlib.Path(__file__).parent + + pathlib.Path(str(symlink_path)).symlink_to(str(symlink_target_path), True) # Register global static route: resource = router.add_static( - "/st", tmp_dir_path, follow_symlinks=False, append_version=True + "/st", str(tmp_path), follow_symlinks=False, append_version=True ) filename = "/append_version_symlink/data.unknown_mime_type" @@ -467,7 +465,7 @@ def test_dynamic_not_match(router) -> None: async def test_static_not_match(router) -> None: - router.add_static("/pre", os.path.dirname(aiohttp.__file__), name="name") + router.add_static("/pre", pathlib.Path(aiohttp.__file__).parent, name="name") resource = router["name"] ret = await resource.resolve(make_mocked_request("GET", "/another/path")) assert (None, set()) == ret @@ -503,17 +501,17 @@ def test_contains(router) -> None: def test_static_repr(router) -> None: - router.add_static("/get", os.path.dirname(aiohttp.__file__), name="name") + router.add_static("/get", pathlib.Path(aiohttp.__file__).parent, name="name") assert Matches(r" None: - route = router.add_static("/prefix", os.path.dirname(aiohttp.__file__)) + route = router.add_static("/prefix", pathlib.Path(aiohttp.__file__).parent) assert "/prefix" == route._prefix def test_static_remove_trailing_slash(router) -> None: - route = router.add_static("/prefix/", os.path.dirname(aiohttp.__file__)) + route = router.add_static("/prefix/", pathlib.Path(aiohttp.__file__).parent) assert "/prefix" == route._prefix @@ -778,7 +776,7 @@ def test_named_resources(router) -> None: route1 = router.add_route("GET", "/plain", 
make_handler(), name="route1") route2 = router.add_route("GET", "/variable/{name}", make_handler(), name="route2") route3 = router.add_static( - "/static", os.path.dirname(aiohttp.__file__), name="route3" + "/static", pathlib.Path(aiohttp.__file__).parent, name="route3" ) names = {route1.name, route2.name, route3.name} @@ -943,11 +941,11 @@ def test_resources_abc(router) -> None: def test_static_route_user_home(router) -> None: here = pathlib.Path(aiohttp.__file__).parent - home = pathlib.Path(os.path.expanduser("~")) - if not str(here).startswith(str(home)): # pragma: no cover + try: + static_dir = pathlib.Path("~") / here.relative_to(pathlib.Path.home()) + except ValueError: # pragma: no cover pytest.skip("aiohttp folder is not placed in user's HOME") - static_dir = "~/" + str(here.relative_to(home)) - route = router.add_static("/st", static_dir) + route = router.add_static("/st", str(static_dir)) assert here == route.get_info()["directory"] @@ -958,13 +956,13 @@ def test_static_route_points_to_file(router) -> None: async def test_404_for_static_resource(router) -> None: - resource = router.add_static("/st", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/st", pathlib.Path(aiohttp.__file__).parent) ret = await resource.resolve(make_mocked_request("GET", "/unknown/path")) assert (None, set()) == ret async def test_405_for_resource_adapter(router) -> None: - resource = router.add_static("/st", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/st", pathlib.Path(aiohttp.__file__).parent) ret = await resource.resolve(make_mocked_request("POST", "/st/abc.py")) assert (None, {"HEAD", "GET"}) == ret @@ -979,12 +977,12 @@ async def test_check_allowed_method_for_found_resource(router) -> None: def test_url_for_in_static_resource(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) assert URL("/static/file.txt") == 
resource.url_for(filename="file.txt") def test_url_for_in_static_resource_pathlib(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) assert URL("/static/file.txt") == resource.url_for( filename=pathlib.Path("file.txt") ) @@ -1163,7 +1161,7 @@ def test_frozen_app_on_subapp(app) -> None: def test_set_options_route(router) -> None: - resource = router.add_static("/static", os.path.dirname(aiohttp.__file__)) + resource = router.add_static("/static", pathlib.Path(aiohttp.__file__).parent) options = None for route in resource: if route.method == "OPTIONS": @@ -1233,7 +1231,7 @@ def test_dynamic_resource_canonical() -> None: def test_static_resource_canonical() -> None: prefix = "/prefix" - directory = str(os.path.dirname(aiohttp.__file__)) + directory = str(pathlib.Path(aiohttp.__file__).parent) canonical = prefix res = StaticResource(prefix=prefix, directory=directory) assert res.canonical == canonical diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index b044f29bc81..d67d67743ba 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,5 +1,4 @@ import asyncio -import os import pathlib import socket import zlib @@ -503,7 +502,7 @@ async def test_static_file_ssl( aiohttp_client, client_ssl_ctx, ) -> None: - dirname = os.path.dirname(__file__) + dirname = pathlib.Path(__file__).parent filename = "data.unknown_mime_type" app = web.Application() app.router.add_static("/static", dirname) @@ -524,9 +523,10 @@ async def test_static_file_ssl( async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: - dirname = os.path.dirname(__file__) + dirname = pathlib.Path(__file__).parent relpath = "../README.rst" - assert os.path.isfile(os.path.join(dirname, relpath)) + full_path = dirname / relpath + assert full_path.is_file() app = web.Application() 
app.router.add_static("/static", dirname) @@ -541,7 +541,7 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: assert 404 == resp.status await resp.release() - url_abspath = "/static/" + os.path.abspath(os.path.join(dirname, relpath)) + url_abspath = "/static/" + str(full_path.resolve()) resp = await client.get(url_abspath) assert 403 == resp.status await resp.release() @@ -550,36 +550,36 @@ async def test_static_file_directory_traversal_attack(aiohttp_client) -> None: def test_static_route_path_existence_check() -> None: - directory = os.path.dirname(__file__) + directory = pathlib.Path(__file__).parent web.StaticResource("/", directory) - nodirectory = os.path.join(directory, "nonexistent-uPNiOEAg5d") + nodirectory = directory / "nonexistent-uPNiOEAg5d" with pytest.raises(ValueError): web.StaticResource("/", nodirectory) async def test_static_file_huge(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = tmp_path / "huge_data.unknown_mime_type" # fill 20MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) - file_st = os.stat(str(tmp_path / filename)) + file_st = file_path.stat() app = web.Application() app.router.add_static("/static", str(tmp_path)) client = await aiohttp_client(app) - resp = await client.get("/static/" + filename) + resp = await client.get("/static/" + file_path.name) assert 200 == resp.status ct = resp.headers["CONTENT-TYPE"] assert "application/octet-stream" == ct assert resp.headers.get("CONTENT-ENCODING") is None assert int(resp.headers.get("CONTENT-LENGTH")) == file_st.st_size - f = (tmp_path / filename).open("rb") + f = file_path.open("rb") off = 0 cnt = 0 while off < file_st.st_size: @@ -989,10 +989,10 @@ async def handler(request): async def test_static_file_huge_cancel(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = 
tmp_path / "huge_data.unknown_mime_type" # fill 100MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: for i in range(1024 * 20): f.write((chr(i % 64 + 0x20) * 1024).encode()) @@ -1005,7 +1005,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) + ret = web.FileResponse(file_path) return ret app = web.Application() @@ -1030,10 +1030,10 @@ async def handler(request): async def test_static_file_huge_error(aiohttp_client, tmp_path) -> None: - filename = "huge_data.unknown_mime_type" + file_path = tmp_path / "huge_data.unknown_mime_type" # fill 20MB file - with (tmp_path / filename).open("wb") as f: + with file_path.open("wb") as f: f.seek(20 * 1024 * 1024) f.write(b"1") @@ -1042,7 +1042,7 @@ async def handler(request): tr = request.transport sock = tr.get_extra_info("socket") sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) - ret = web.FileResponse(pathlib.Path(str(tmp_path / filename))) + ret = web.FileResponse(file_path) return ret app = web.Application() diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index aee3ecd5c24..76e533e473a 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,9 +1,6 @@ import asyncio import functools -import os import pathlib -import shutil -import tempfile from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -16,24 +13,6 @@ from aiohttp.web_urldispatcher import SystemRoute -@pytest.fixture(scope="function") -def tmp_dir_path(request): - """ - Give a path for a temporary directory - - The directory is destroyed at the end of the test. - """ - # Temporary directory. 
- tmp_dir = tempfile.mkdtemp() - - def teardown(): - # Delete the whole directory: - shutil.rmtree(tmp_dir) - - request.addfinalizer(teardown) - return tmp_dir - - @pytest.mark.parametrize( "show_index,status,prefix,data", [ @@ -63,7 +42,7 @@ def teardown(): ], ) async def test_access_root_of_static_handler( - tmp_dir_path: pathlib.Path, + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, show_index: bool, status: int, @@ -74,22 +53,22 @@ async def test_access_root_of_static_handler( # Try to access the root of static file server, and make # sure that correct HTTP statuses are returned depending if we directory # index should be shown or not. - # Put a file inside tmp_dir_path: - my_file_path = os.path.join(tmp_dir_path, "my_file") - with open(my_file_path, "w") as fw: - fw.write("hello") + # Put a file inside tmp_path: + my_file = tmp_path / "my_file" + my_dir = tmp_path / "my_dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + with my_file.open("w") as fw: + fw.write("hello") - my_file_path = os.path.join(my_dir_path, "my_file_in_dir") - with open(my_file_path, "w") as fw: + with my_file_in_dir.open("w") as fw: fw.write("world") app = web.Application() # Register global static route: - app.router.add_static(prefix, tmp_dir_path, show_index=show_index) + app.router.add_static(prefix, str(tmp_path), show_index=show_index) client = await aiohttp_client(app) # Request the root of the static directory. 
@@ -103,25 +82,25 @@ async def test_access_root_of_static_handler( async def test_follow_symlink( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a symlink, in static folder data = "hello world" - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + my_dir_path = tmp_path / "my_dir" + my_dir_path.mkdir() - my_file_path = os.path.join(my_dir_path, "my_file_in_dir") - with open(my_file_path, "w") as fw: + my_file_path = my_dir_path / "my_file_in_dir" + with my_file_path.open("w") as fw: fw.write(data) - my_symlink_path = os.path.join(tmp_dir_path, "my_symlink") - os.symlink(my_dir_path, my_symlink_path) + my_symlink_path = tmp_path / "my_symlink" + pathlib.Path(str(my_symlink_path)).symlink_to(str(my_dir_path), True) app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, follow_symlinks=True) + app.router.add_static("/", str(tmp_path), follow_symlinks=True) client = await aiohttp_client(app) # Request the root of the static directory. 
@@ -229,7 +208,7 @@ async def test_follow_symlink_directory_traversal_after_normalization( ], ) async def test_access_to_the_file_with_spaces( - tmp_dir_path: pathlib.Path, + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient, dir_name: str, filename: str, @@ -237,21 +216,19 @@ async def test_access_to_the_file_with_spaces( ) -> None: # Checks operation of static files with spaces - my_dir_path = os.path.join(tmp_dir_path, dir_name) + my_dir_path = tmp_path / dir_name + if my_dir_path != tmp_path: + my_dir_path.mkdir() - if dir_name: - os.mkdir(my_dir_path) - - my_file_path = os.path.join(my_dir_path, filename) - - with open(my_file_path, "w") as fw: + my_file_path = my_dir_path / filename + with my_file_path.open("w") as fw: fw.write(data) app = web.Application() - url = os.path.join("/", dir_name, filename) + url = "/" + str(pathlib.Path(dir_name, filename)) - app.router.add_static("/", tmp_dir_path) + app.router.add_static("/", str(tmp_path)) client = await aiohttp_client(app) r = await client.get(url) @@ -261,7 +238,7 @@ async def test_access_to_the_file_with_spaces( async def test_access_non_existing_resource( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests accessing non-existing resource # Try to access a non-exiting resource and make sure that 404 HTTP status @@ -269,7 +246,7 @@ async def test_access_non_existing_resource( app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. @@ -323,13 +300,13 @@ def sync_handler(request): async def test_unauthorized_folder_access( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the unauthorized access to a folder of static file server. 
# Try to list a folder content of static file server when server does not # have permissions to do so for the folder. - my_dir_path = os.path.join(tmp_dir_path, "my_dir") - os.mkdir(my_dir_path) + my_dir = tmp_path / "my_dir" + my_dir.mkdir() app = web.Application() @@ -341,34 +318,34 @@ async def test_unauthorized_folder_access( path_constructor.return_value = path # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. - r = await client.get("/my_dir") + r = await client.get("/" + my_dir.name) assert r.status == 403 async def test_access_symlink_loop( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a looped symlink, which could not be resolved. - my_dir_path = os.path.join(tmp_dir_path, "my_symlink") - os.symlink(my_dir_path, my_dir_path) + my_dir_path = tmp_path / "my_symlink" + pathlib.Path(str(my_dir_path)).symlink_to(str(my_dir_path), True) app = web.Application() # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. - r = await client.get("/my_symlink") + r = await client.get("/" + my_dir_path.name) assert r.status == 404 async def test_access_special_resource( - tmp_dir_path: pathlib.Path, aiohttp_client: AiohttpClient + tmp_path: pathlib.Path, aiohttp_client: AiohttpClient ) -> None: # Tests the access to a resource that is neither a file nor a directory. # Checks that if a special resource is accessed (f.e. 
named pipe or UNIX @@ -388,7 +365,7 @@ async def test_access_special_resource( path_constructor.return_value = path # Register global static route: - app.router.add_static("/", tmp_dir_path, show_index=True) + app.router.add_static("/", str(tmp_path), show_index=True) client = await aiohttp_client(app) # Request the root of the static directory. @@ -623,20 +600,20 @@ async def test_static_absolute_url( # /static/\\machine_name\c$ or /static/D:\path # where the static dir is totally different app = web.Application() - fname = tmp_path / "file.txt" - fname.write_text("sample text", "ascii") + file_path = tmp_path / "file.txt" + file_path.write_text("sample text", "ascii") here = pathlib.Path(__file__).parent app.router.add_static("/static", here) client = await aiohttp_client(app) - resp = await client.get("/static/" + str(fname)) + resp = await client.get("/static/" + str(file_path.resolve())) assert resp.status == 403 async def test_for_issue_5250( - aiohttp_client: AiohttpClient, tmp_dir_path: pathlib.Path + aiohttp_client: AiohttpClient, tmp_path: pathlib.Path ) -> None: app = web.Application() - app.router.add_static("/foo", tmp_dir_path) + app.router.add_static("/foo", tmp_path) async def get_foobar(request: web.Request) -> web.Response: return web.Response(body="success!") diff --git a/tools/check_changes.py b/tools/check_changes.py index 118d1182b9a..6cc4d050cd8 100755 --- a/tools/check_changes.py +++ b/tools/check_changes.py @@ -22,7 +22,7 @@ def get_root(script_path): - folder = script_path.absolute().parent + folder = script_path.resolve().parent while not (folder / ".git").exists(): folder = folder.parent if folder == folder.anchor: From 5ff4b3c405ee741a46d6743209cd32259f939313 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 28 Jan 2024 22:52:59 +0000 Subject: [PATCH 049/144] Update version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 70ddb359640..8153b4616b5 
100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.2" +__version__ = "3.9.2.dev0" from typing import TYPE_CHECKING, Tuple From 33f49e8ec985e428083cd78bc11cb5fe0dff0e57 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 10:53:58 +0000 Subject: [PATCH 050/144] Bump pypa/cibuildwheel from 2.16.2 to 2.16.4 (#8092) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.2 to 2.16.4.
Release notes

Sourced from pypa/cibuildwheel's releases.

v2.16.4

🛠 Update manylinux pins to upgrade from a problematic PyPy version. (#1737)

v2.16.3

  • 🐛 Fix a bug when building from sdist, where relative paths to files in the package didn't work because the working directory was wrong (#1687)
  • 🛠 Adds the ability to disable mounting the host filesystem in containers to /host, through the disable_host_mount suboption on CIBW_CONTAINER_ENGINE.
  • 📚 A lot of docs improvements! (#1708, #1705, #1686, #1679, #1667, #1665)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v2.16.4

28 January 2024

  • 🛠 Update manylinux pins to upgrade from a problematic PyPy version. (#1737)

v2.16.3

26 January 2024

  • 🐛 Fix a bug when building from sdist, where relative paths to files in the package didn't work because the working directory was wrong (#1687)
  • 🛠 Adds the ability to disable mounting the host filesystem in containers to /host, through the disable_host_mount suboption on CIBW_CONTAINER_ENGINE.
  • 📚 A lot of docs improvements! (#1708, #1705, #1686, #1679, #1667, #1665)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.2&new-version=2.16.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index af3c0bf6f23..e0680c88f25 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -369,7 +369,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.16.4 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 5637e8f02a05cf0ae800ad3e175f4bbe0a3e54cc Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 18:04:02 +0000 Subject: [PATCH 051/144] [PR #8098/aca206fc backport][3.9] Fix backwards compatibility with ssl (#8101) **This is a backport of PR #8098 as merged into 3.10 (aca206fc27ecec4e0dc14de6ab11816e7f35409c).** Fixes #8097. Co-authored-by: Sam Bull --- CHANGES/8097.bugfix.rst | 1 + CHANGES/8098.bugfix.rst | 2 ++ aiohttp/client_reqrep.py | 2 ++ tests/test_connector.py | 5 +++++ 4 files changed, 10 insertions(+) create mode 120000 CHANGES/8097.bugfix.rst create mode 100644 CHANGES/8098.bugfix.rst diff --git a/CHANGES/8097.bugfix.rst b/CHANGES/8097.bugfix.rst new file mode 120000 index 00000000000..c799b052a6c --- /dev/null +++ b/CHANGES/8097.bugfix.rst @@ -0,0 +1 @@ +8098.bugfix.rst \ No newline at end of file diff --git a/CHANGES/8098.bugfix.rst b/CHANGES/8098.bugfix.rst new file mode 100644 index 00000000000..0242fb978e5 --- /dev/null +++ b/CHANGES/8098.bugfix.rst @@ -0,0 +1,2 @@ +Fixed backwards compatibility breakage of ``ssl`` parameter when set outside of +``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index bb43ae9318d..e0de951a33a 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -159,6 +159,8 @@ def _merge_ssl_params( ssl_context: Optional["SSLContext"], fingerprint: Optional[bytes], ) -> Union["SSLContext", bool, Fingerprint]: + if ssl is None: + ssl = True # Double check for backwards compatibility if verify_ssl is not None and not verify_ssl: warnings.warn( "verify_ssl is deprecated, use ssl=False instead", diff --git a/tests/test_connector.py b/tests/test_connector.py index dc8aa3c2605..142abab3c15 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -2001,6 +2001,11 @@ async def test_default_use_dns_cache() -> None: assert conn.use_dns_cache +async def test_ssl_none() -> None: + conn = aiohttp.TCPConnector(ssl=None) + assert conn._ssl is True + + async def test_resolver_not_called_with_address_is_ip(loop) -> None: resolver = mock.MagicMock() connector = aiohttp.TCPConnector(resolver=resolver) From bf4edce5d6c967fa1d6a581b3aaab1bd2f5121cd Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 29 Jan 2024 19:13:14 +0000 Subject: [PATCH 052/144] Release v3.9.3 (#8102) --- CHANGES.rst | 31 +++++++++++++++++++++++++++++++ CHANGES/3957.misc | 1 - CHANGES/8097.bugfix.rst | 1 - CHANGES/8098.bugfix.rst | 2 -- aiohttp/__init__.py | 2 +- 5 files changed, 32 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/3957.misc delete mode 120000 CHANGES/8097.bugfix.rst delete mode 100644 CHANGES/8098.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index c26cc90e76e..64dff9b516d 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,37 @@ .. towncrier release notes start +3.9.3 (2024-01-29) +================== + +Bug fixes +--------- + +- Fixed backwards compatibility breakage (in 3.9.2) of ``ssl`` parameter when set outside + of ``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8097`, :issue:`8098`. + + + + +Miscellaneous internal changes +------------------------------ + +- Improved test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. + + + *Related issues and pull requests on GitHub:* + :issue:`3957`. + + + + +---- + + 3.9.2 (2024-01-28) ================== diff --git a/CHANGES/3957.misc b/CHANGES/3957.misc deleted file mode 100644 index b4f9f58edb9..00000000000 --- a/CHANGES/3957.misc +++ /dev/null @@ -1 +0,0 @@ -Improve test suite handling of paths and temp files to consistently use pathlib and pytest fixtures. diff --git a/CHANGES/8097.bugfix.rst b/CHANGES/8097.bugfix.rst deleted file mode 120000 index c799b052a6c..00000000000 --- a/CHANGES/8097.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -8098.bugfix.rst \ No newline at end of file diff --git a/CHANGES/8098.bugfix.rst b/CHANGES/8098.bugfix.rst deleted file mode 100644 index 0242fb978e5..00000000000 --- a/CHANGES/8098.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed backwards compatibility breakage of ``ssl`` parameter when set outside of -``ClientSession`` (e.g. directly in ``TCPConnector``) -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 8153b4616b5..12209def6c4 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.2.dev0" +__version__ = "3.9.3" from typing import TYPE_CHECKING, Tuple From 9e938f1ccf78346241342f1aa2fd590eee331a72 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 29 Jan 2024 21:08:36 +0000 Subject: [PATCH 053/144] Bump version --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 12209def6c4..6f6ab8e6b99 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.3" +__version__ = "3.9.3.dev0" from typing import TYPE_CHECKING, Tuple From 77052043da4c63cb2178523b9b6f62467eec1d9f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 01:37:02 +0100 Subject: [PATCH 054/144] =?UTF-8?q?[PR=20#8099/3aa243ae=20backport][3.10]?= =?UTF-8?q?=20=F0=9F=93=9D=F0=9F=92=85=20Ask=20PR=20submitters=20about=20t?= =?UTF-8?q?he=20complexities=20(#8106)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8099 as merged into master (3aa243ae89c597b5576863a7ac309732f6756151).** I envision this may help the contributors and the maintainers be more mindful regarding what goes into the repository, by illuminating the connected burdens of maintaining the contributions long-term. ## What do these changes do? $sbj. ## Are there changes in behavior for the user? Nope. ## Related issue number N/A ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- .github/PULL_REQUEST_TEMPLATE.md | 14 ++++++++++++++ CHANGES/8099.contrib.rst | 4 ++++ 2 files changed, 18 insertions(+) create mode 100644 CHANGES/8099.contrib.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 686f70cd975..d4b1dba4340 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,6 +8,20 @@ +## Is it a substantial burden for the maintainers to support this? 
+ + + ## Related issue number diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst new file mode 100644 index 00000000000..827ecfa5827 --- /dev/null +++ b/CHANGES/8099.contrib.rst @@ -0,0 +1,4 @@ +The pull request template is now asking the contributors to +answer a question about the long-term maintenance challenges +they envision as a result of merging their patches +-- by :user:`webknjaz`. From ab2e368f7e5369682af2046f13e3558786782df3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 01:37:07 +0100 Subject: [PATCH 055/144] =?UTF-8?q?[PR=20#8099/3aa243ae=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=93=9D=F0=9F=92=85=20Ask=20PR=20submitters=20about=20t?= =?UTF-8?q?he=20complexities=20(#8105)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8099 as merged into master (3aa243ae89c597b5576863a7ac309732f6756151).** I envision this may help the contributors and the maintainers be more mindful regarding what goes into the repository, by illuminating the connected burdens of maintaining the contributions long-term. ## What do these changes do? $sbj. ## Are there changes in behavior for the user? Nope. ## Related issue number N/A ## Checklist - [ ] I think the code is well written - [ ] Unit tests for the changes exist - [x] Documentation reflects the changes - [x] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. - [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. 
That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: Sviatoslav Sydorenko --- .github/PULL_REQUEST_TEMPLATE.md | 14 ++++++++++++++ CHANGES/8099.contrib.rst | 4 ++++ 2 files changed, 18 insertions(+) create mode 100644 CHANGES/8099.contrib.rst diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 686f70cd975..d4b1dba4340 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,6 +8,20 @@ +## Is it a substantial burden for the maintainers to support this? + + + ## Related issue number diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst new file mode 100644 index 00000000000..827ecfa5827 --- /dev/null +++ b/CHANGES/8099.contrib.rst @@ -0,0 +1,4 @@ +The pull request template is now asking the contributors to +answer a question about the long-term maintenance challenges +they envision as a result of merging their patches +-- by :user:`webknjaz`. 
From 2a6e1c316029d713d0f69abb192029620c5ea3e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:06:37 +0100 Subject: [PATCH 056/144] [PR #8107/854e6d87 backport][3.10] Partially revert "Add more information to contributing page (#7916)" (#8109) **This is a backport of PR #8107 as merged into master (854e6d8702c20e725b3e747ab48265ecf0184e20).** This partially reverts commit 822fbc7431f3c5522d3e587ad0b658bef8b6a0ab. In particular, this drops the top level title from README in the `CHANGES/` folder and restores the original label. For the proper Sphinx ToC structuring, a title in the `docs/contributing.rst` document. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/README.rst | 22 +++++++--------------- docs/contributing.rst | 26 ++++++++++++++------------ 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 5beb8999226..37cd14d2cf8 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,15 +1,7 @@ -.. _Making a pull request: - -Making a pull request -===================== - -When making a pull request, please include a short summary of the changes -and a reference to any issue tickets that the PR is intended to solve. -All PRs with code changes should include tests. All changes should -include a changelog entry. +.. _Adding change notes with your PRs: Adding change notes with your PRs ---------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -27,7 +19,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? ---------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``aiohttp`` uses `towncrier `_ for changelog management. 
@@ -47,9 +39,9 @@ linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. -If you wish to sign your change, feel free to add -``-- by :user:`github-username``` at the end (replace -``github-username`` with your own!). +If you wish to sign your change, feel free to add ``-- by +:user:`github-username``` at the end (replace ``github-username`` +with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -88,7 +80,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ------------------------------------------------------------ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/contributing.rst b/docs/contributing.rst index 84d223d0e0b..6497212813d 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -************ +============ (:doc:`contributing-admins`) Instructions for contributors -============================= +----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite -============================================ +-------------------------------------------- We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter -================= +----------------- The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite -====================== +---------------------- After all the preconditions are met you can run tests typing the next command: @@ -159,7 +159,7 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions Code coverage -============= +------------- We use *codecov.io* as an indispensable tool for analyzing our coverage results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage @@ -226,7 +226,7 @@ $ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html ``` Documentation -============= +------------- We encourage documentation improvements. @@ -242,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking -============== +-------------- We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -261,18 +261,20 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Changelog update +---------------- .. include:: ../CHANGES/README.rst Making a Pull Request -===================== +--------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting -=========== +----------- All Pull Requests are created against *master* git branch. 
@@ -313,7 +315,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer -================================== +---------------------------------- Contribute! From d4bef7a3e76cc0c7ecd5a2a6e48f1b0b58656e5b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:06:42 +0100 Subject: [PATCH 057/144] [PR #8107/854e6d87 backport][3.9] Partially revert "Add more information to contributing page (#7916)" (#8108) **This is a backport of PR #8107 as merged into master (854e6d8702c20e725b3e747ab48265ecf0184e20).** This partially reverts commit 822fbc7431f3c5522d3e587ad0b658bef8b6a0ab. In particular, this drops the top level title from README in the `CHANGES/` folder and restores the original label. For the proper Sphinx ToC structuring, a title in the `docs/contributing.rst` document. Co-authored-by: Sviatoslav Sydorenko --- CHANGES/README.rst | 22 +++++++--------------- docs/contributing.rst | 26 ++++++++++++++------------ 2 files changed, 21 insertions(+), 27 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 5beb8999226..37cd14d2cf8 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -1,15 +1,7 @@ -.. _Making a pull request: - -Making a pull request -===================== - -When making a pull request, please include a short summary of the changes -and a reference to any issue tickets that the PR is intended to solve. -All PRs with code changes should include tests. All changes should -include a changelog entry. +.. _Adding change notes with your PRs: Adding change notes with your PRs ---------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is very important to maintain a log for news of how updating to the new version of the software will affect @@ -27,7 +19,7 @@ to the end-users most of the time. And so such details should be recorded in the Git history rather than a changelog. Alright! So how to add a news fragment? 
---------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``aiohttp`` uses `towncrier `_ for changelog management. @@ -47,9 +39,9 @@ linking parts of the docs or external sites. However, you do not need to reference the issue or PR numbers here as *towncrier* will automatically add a reference to all of the affected issues when rendering the news file. -If you wish to sign your change, feel free to add -``-- by :user:`github-username``` at the end (replace -``github-username`` with your own!). +If you wish to sign your change, feel free to add ``-- by +:user:`github-username``` at the end (replace ``github-username`` +with your own!). Finally, name your file following the convention that Towncrier understands: it should start with the number of an issue or a @@ -88,7 +80,7 @@ necessary to make a separate documentation fragment for documentation changes accompanying the relevant code changes. Examples for adding changelog entries to your Pull Requests ------------------------------------------------------------ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File :file:`CHANGES/6045.doc.1.rst`: diff --git a/docs/contributing.rst b/docs/contributing.rst index 84d223d0e0b..6497212813d 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -1,12 +1,12 @@ .. _aiohttp-contributing: Contributing -************ +============ (:doc:`contributing-admins`) Instructions for contributors -============================= +----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. @@ -25,7 +25,7 @@ Workflow is pretty straightforward: 4. Make sure all tests passed - 5. Add a file into the ``CHANGES`` folder (see `Making a pull request`_ for how). + 5. Add a file into the ``CHANGES`` folder (see `Changelog update`_ for how). 6. 
Commit changes to your own aiohttp clone @@ -53,7 +53,7 @@ Workflow is pretty straightforward: Preconditions for running aiohttp test suite -============================================ +-------------------------------------------- We expect you to use a python virtual environment to run our tests. @@ -116,7 +116,7 @@ Congratulations, you are ready to run the test suite! Run autoformatter -================= +----------------- The project uses black_ + isort_ formatters to keep the source code style. Please run `make fmt` after every change before starting tests. @@ -127,7 +127,7 @@ Please run `make fmt` after every change before starting tests. Run aiohttp test suite -====================== +---------------------- After all the preconditions are met you can run tests typing the next command: @@ -159,7 +159,7 @@ Any extra texts (print statements and so on) should be removed. make test-3.10-no-extensions Code coverage -============= +------------- We use *codecov.io* as an indispensable tool for analyzing our coverage results. Visit https://codecov.io/gh/aio-libs/aiohttp to see coverage @@ -226,7 +226,7 @@ $ python -m webbrowser -n file://"$(pwd)"/htmlcov/index.html ``` Documentation -============= +------------- We encourage documentation improvements. @@ -242,7 +242,7 @@ Once it finishes it will output the index html page Go to the link and make sure your doc changes looks good. Spell checking -============== +-------------- We use ``pyenchant`` and ``sphinxcontrib-spelling`` for running spell checker for documentation: @@ -261,18 +261,20 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Changelog update +---------------- .. include:: ../CHANGES/README.rst Making a Pull Request -===================== +--------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. Backporting -=========== +----------- All Pull Requests are created against *master* git branch. 
@@ -313,7 +315,7 @@ like *needs backport to 3.1*. merging the backport. How to become an aiohttp committer -================================== +---------------------------------- Contribute! From 9f5537d24ab14ff7839fc5b76ab9b36c20cd3af3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:58:36 +0100 Subject: [PATCH 058/144] [PR #8110/2d8ffdf5 backport][3.10] Re-add a PR preparation intro from PR #7916 (#8112) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8110 as merged into master (2d8ffdf5bdab05b7bf8aa7fc9fc8c4c9eee1d57e).** None Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- docs/contributing.rst | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6497212813d..9abd367a150 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -261,13 +261,22 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Preparing a pull request +------------------------ + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. + + Changelog update ---------------- .. include:: ../CHANGES/README.rst -Making a Pull Request +Making a pull request --------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. 
From efe3c8895ba44796e031fbb96784d6bc5807072a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 03:58:47 +0100 Subject: [PATCH 059/144] [PR #8110/2d8ffdf5 backport][3.9] Re-add a PR preparation intro from PR #7916 (#8111) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8110 as merged into master (2d8ffdf5bdab05b7bf8aa7fc9fc8c4c9eee1d57e).** None Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- docs/contributing.rst | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/contributing.rst b/docs/contributing.rst index 6497212813d..9abd367a150 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -261,13 +261,22 @@ To run spell checker on Linux box you should install it first: $ pip install sphinxcontrib-spelling +Preparing a pull request +------------------------ + +When making a pull request, please include a short summary of the changes +and a reference to any issue tickets that the PR is intended to solve. +All PRs with code changes should include tests. All changes should +include a changelog entry. + + Changelog update ---------------- .. include:: ../CHANGES/README.rst -Making a Pull Request +Making a pull request --------------------- After finishing all steps make a GitHub_ Pull Request with *master* base branch. 
From 36b6c53ede51c8ce73572b4fe458a96a59aea127 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 02:45:29 +0100 Subject: [PATCH 060/144] =?UTF-8?q?[PR=20#8113/8b33fe6f=20backport][3.10]?= =?UTF-8?q?=20=F0=9F=93=9D=20Make=20the=20changelog=20examples=20elaborate?= =?UTF-8?q?=20(#8115)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8113 as merged into master (8b33fe6f3cfc0ecf8747586f1cf47542d0a6039b).** This patch shows the use of sentences with full stops and it also includes an excerpt with a paragraph, which demonstrates that it is possible to include details extending shorter sentences. Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/README.rst | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 37cd14d2cf8..78d8b2f308f 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -86,21 +86,28 @@ File :file:`CHANGES/6045.doc.1.rst`: .. code-block:: rst - Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz` + Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`. -File :file:`CHANGES/4431.bugfix.rst`: +File :file:`CHANGES/8074.bugfix.rst`: .. code-block:: rst - Fixed HTTP client requests to honor ``no_proxy`` environment - variables -- by :user:`scirelli` + Fixed an unhandled exception in the Python HTTP parser on header + lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP + major and minor version are now rejected. Invalid header field + names containing question mark or slash are now rejected. Such + requests are incompatible with :rfc:`9110#section-5.6.2` and are + not known to be of any legitimate use. File :file:`CHANGES/4594.feature.rst`: .. 
code-block:: rst Added support for ``ETag`` to :py:class:`~aiohttp.web.FileResponse` - -- by :user:`greshilov`, :user:`serhiy-storchaka` and :user:`asvetlov` + -- by :user:`greshilov`, :user:`serhiy-storchaka` and + :user:`asvetlov`. .. tip:: From 36952e45fd942492d8ab74fc3b7d480e16ee21e6 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 02:45:38 +0100 Subject: [PATCH 061/144] =?UTF-8?q?[PR=20#8113/8b33fe6f=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=93=9D=20Make=20the=20changelog=20examples=20elaborate?= =?UTF-8?q?=20(#8114)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8113 as merged into master (8b33fe6f3cfc0ecf8747586f1cf47542d0a6039b).** This patch shows the use of sentences with full stops and it also includes an excerpt with a paragraph, which demonstrates that it is possible to include details extending shorter sentences. Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/README.rst | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/CHANGES/README.rst b/CHANGES/README.rst index 37cd14d2cf8..78d8b2f308f 100644 --- a/CHANGES/README.rst +++ b/CHANGES/README.rst @@ -86,21 +86,28 @@ File :file:`CHANGES/6045.doc.1.rst`: .. code-block:: rst - Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz` + Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`. -File :file:`CHANGES/4431.bugfix.rst`: +File :file:`CHANGES/8074.bugfix.rst`: .. code-block:: rst - Fixed HTTP client requests to honor ``no_proxy`` environment - variables -- by :user:`scirelli` + Fixed an unhandled exception in the Python HTTP parser on header + lines starting with a colon -- by :user:`pajod`. + + Invalid request lines with anything but a dot between the HTTP + major and minor version are now rejected. Invalid header field + names containing question mark or slash are now rejected. 
Such + requests are incompatible with :rfc:`9110#section-5.6.2` and are + not known to be of any legitimate use. File :file:`CHANGES/4594.feature.rst`: .. code-block:: rst Added support for ``ETag`` to :py:class:`~aiohttp.web.FileResponse` - -- by :user:`greshilov`, :user:`serhiy-storchaka` and :user:`asvetlov` + -- by :user:`greshilov`, :user:`serhiy-storchaka` and + :user:`asvetlov`. .. tip:: From 814672551f7a8cfd06a42ef3353e9713cb28b0c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 10:36:23 +0000 Subject: [PATCH 062/144] Bump multidict from 6.0.4 to 6.0.5 (#8128) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [multidict](https://github.com/aio-libs/multidict) from 6.0.4 to 6.0.5.
Release notes

Sourced from multidict's releases.

6.0.5

Bug fixes

  • Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by @​iemelyanov💰.

    Related issues and pull requests on GitHub: #862, #864, #868, #898.

  • Reverted to using the public argument parsing API PyArg_ParseTupleAndKeywords() under Python 3.12 -- by @​charles-dyfis-net💰 and @​webknjaz💰.

    The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in #926. It also fixes a segmentation fault crash caused by passing keyword arguments to MultiDict.getall() discovered by @​jonaslb💰 and @​hroncok💰 while examining the problem.

    Related issues and pull requests on GitHub: #862, #909, #926, #929.

  • Fixed a SystemError: null argument to internal routine error on a MultiDict.items().isdisjoint() call when using C Extensions.

    Related issues and pull requests on GitHub: #927.

Improved documentation

  • On the Contributing docs age, a link to the Towncrier philosophy has been fixed.

    Related issues and pull requests on GitHub: #911.

Packaging updates and notes for downstreams

  • Stopped marking all files as installable package data -- by @​webknjaz💰.

    This change helps setuptools understand that C-headers are not to be installed under lib/python3.{x}/site-packages/.

    Related commits on GitHub: 31e1170.

... (truncated)

Changelog

Sourced from multidict's changelog.

6.0.5 (2024-02-01)

Bug fixes

  • Upgraded the C-API macros that have been deprecated in Python 3.9 and later removed in 3.13 -- by :user:iemelyanov.

    Related issues and pull requests on GitHub: :issue:862, :issue:864, :issue:868, :issue:898.

  • Reverted to using the public argument parsing API :c:func:PyArg_ParseTupleAndKeywords under Python 3.12 -- by :user:charles-dyfis-net and :user:webknjaz.

    The effect is that this change prevents build failures with clang 16.9.6 and gcc-14 reported in :issue:926. It also fixes a segmentation fault crash caused by passing keyword arguments to :py:meth:MultiDict.getall() <multidict.MultiDict.getall> discovered by :user:jonaslb and :user:hroncok while examining the problem.

    Related issues and pull requests on GitHub: :issue:862, :issue:909, :issue:926, :issue:929.

  • Fixed a SystemError: null argument to internal routine error on a MultiDict.items().isdisjoint() call when using C Extensions.

    Related issues and pull requests on GitHub: :issue:927.

Improved documentation

  • On the Contributing docs <https://github.com/aio-libs/multidict/blob/master/CHANGES/README.rst>_ page, a link to the Towncrier philosophy has been fixed.

    Related issues and pull requests on GitHub:

... (truncated)

Commits
  • a9b281b ⇪ 📦 Release v6.0.5
  • ed825c8 🧪 Download artifacts to dist/ @ release job
  • 7b04a64 🧪 Normalize issue refs @ release action
  • 74840e8 🧪 Pass Codecov token to reusable linters job
  • 41c133e 🧪 Bump Codecov action to v4
  • adb1976 📝 Fix return type @ Sphinx config
  • 99e435f 📝 Mention bylines in the changelog guidelines
  • 736169e 📝 Clarify need to only ref PR @ change note name
  • 887846f 📝 Highlight the RST term @ changelog guide
  • 8f57f8a 📝 Add a missing comma @ changelog guide
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.0.4&new-version=6.0.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 33495ca2ba3..e10f80a9cca 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==21.2.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c9d48ea05d3..02ccf9390da 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -107,7 +107,7 @@ jinja2==3.0.3 # towncrier markupsafe==2.0.1 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index f6b3175f34a..201da88c351 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # cython==3.0.8 # via -r requirements/cython.in -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in typing-extensions==4.9.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 945a21380d1..866b00bd9fa 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -103,7 +103,7 @@ jinja2==3.1.2 # towncrier markupsafe==2.1.3 # via jinja2 -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 9c4f984cd75..915f9c24dcc 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe 
--output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.0.4 +multidict==6.0.5 # via -r requirements/multidict.in diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 2263f16bcfa..4a968058d61 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.4.1 # aiosignal idna==3.4 # via yarl -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index cc531b48df1..29021aecde1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,7 +53,7 @@ idna==3.4 # yarl iniconfig==2.0.0 # via pytest -multidict==6.0.4 +multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl From 9726a679b4f1fe2247065f1a0f9b30dbd2495fda Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 13:23:23 -0600 Subject: [PATCH 063/144] [PR #8127/b97d9a97 backport][3.10] Treat Accept-Encoding header as case-insensitive for gzip file check (#8131) Co-authored-by: Steve Repsher This is a backport of PR #8127 as merged into master (b97d9a9). --- CHANGES/8104.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_fileresponse.py | 6 +++++- aiohttp/web_response.py | 2 ++ tests/test_web_sendfile.py | 5 ++++- 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8104.bugfix.rst diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst new file mode 100644 index 00000000000..1ebe6f06d9d --- /dev/null +++ b/CHANGES/8104.bugfix.rst @@ -0,0 +1 @@ +Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 475ec8604e2..8df68497dbe 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -299,6 +299,7 @@ Stepan Pletnev Stephan Jaensch Stephen Cirelli Stephen Granade +Steve Repsher Steven Seguin Sunghyun Hwang Sunit Deshpande diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 6496ffaf317..7dbe50f0a5a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -145,7 +145,11 @@ def _get_file_path_stat_and_gzip( async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + check_for_gzipped_file = ( + "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + ) filepath, st, gzip = await loop.run_in_executor( None, self._get_file_path_stat_and_gzip, check_for_gzipped_file ) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index b6a4ba9b31e..40d6f01ecaa 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -400,6 +400,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) else: + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() for coding in ContentCoding: if coding.value in accept_encoding: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 2817e085a6f..d472c407b7a 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -8,7 +8,10 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( - "GET", "http://python.org/logo.png", headers={hdrs.ACCEPT_ENCODING: "gzip"} + "GET", + 
"http://python.org/logo.png", + # Header uses some uppercase to ensure case-insensitive treatment + headers={hdrs.ACCEPT_ENCODING: "GZip"}, ) gz_filepath = mock.create_autospec(Path, spec_set=True) From a4e94ffc341d884ed0345d634b56984e1c460274 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 13:23:38 -0600 Subject: [PATCH 064/144] [PR #8127/b97d9a97 backport][3.9] Treat Accept-Encoding header as case-insensitive for gzip file check (#8130) Co-authored-by: Steve Repsher This is a backport of PR #8127 as merged into master (b97d9a9). --- CHANGES/8104.bugfix.rst | 1 + CONTRIBUTORS.txt | 1 + aiohttp/web_fileresponse.py | 6 +++++- aiohttp/web_response.py | 2 ++ tests/test_web_sendfile.py | 5 ++++- 5 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8104.bugfix.rst diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst new file mode 100644 index 00000000000..1ebe6f06d9d --- /dev/null +++ b/CHANGES/8104.bugfix.rst @@ -0,0 +1 @@ +Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. 
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 64b6f575f26..be4a3ad48d4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -299,6 +299,7 @@ Stepan Pletnev Stephan Jaensch Stephen Cirelli Stephen Granade +Steve Repsher Steven Seguin Sunghyun Hwang Sunit Deshpande diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 6496ffaf317..7dbe50f0a5a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -145,7 +145,11 @@ def _get_file_path_stat_and_gzip( async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() - check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "") + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + check_for_gzipped_file = ( + "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + ) filepath, st, gzip = await loop.run_in_executor( None, self._get_file_path_stat_and_gzip, check_for_gzipped_file ) diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index b6a4ba9b31e..40d6f01ecaa 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -400,6 +400,8 @@ async def _start_compression(self, request: "BaseRequest") -> None: if self._compression_force: await self._do_start_compression(self._compression_force) else: + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() for coding in ContentCoding: if coding.value in accept_encoding: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index 2817e085a6f..d472c407b7a 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -8,7 +8,10 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: request = make_mocked_request( - "GET", "http://python.org/logo.png", headers={hdrs.ACCEPT_ENCODING: "gzip"} + "GET", + 
"http://python.org/logo.png", + # Header uses some uppercase to ensure case-insensitive treatment + headers={hdrs.ACCEPT_ENCODING: "GZip"}, ) gz_filepath = mock.create_autospec(Path, spec_set=True) From e56e9b9d9de9344bba572cf8e1627cc40c87938d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 3 Feb 2024 18:12:53 +0000 Subject: [PATCH 065/144] Bump pypa/cibuildwheel from 2.16.4 to 2.16.5 (#8120) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.4 to 2.16.5.
Release notes

Sourced from pypa/cibuildwheel's releases.

v2.16.5

  • 🐛 Fix an incompatibility with the GitHub Action and new GitHub Runner images for Windows that bundle Powershell 7.3+ (#1741)
  • 🛠 Preliminary support for new macos-14 arm64 runners (#1743)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v2.16.5

30 January 2024

  • 🐛 Fix an incompatibility with the GitHub Action and new GitHub Runner images for Windows that bundle Powershell 7.3+ (#1741)
  • 🛠 Preliminary support for new macos-14 arm64 runners (#1743)
Commits
  • ce3fb78 Bump version: v2.16.5
  • 5b0b458 fix: download pipx for action, allow support for M1 (#1743)
  • a7ea5fb Merge pull request #1739 from henryiii/henryiii/chore/checkschemas
  • bc55e8b Merge pull request #1741 from jborean93/pwsh-7.4
  • c753cd2 Add support for PowerShell 7.4 in GHA
  • 07bd78c chore: check schemas
  • d7db575 docs: add keyvi as an example that combines cibuildwheel with the ccache acti...
  • 7154e18 [Bot] Update dependencies (#1738)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.4&new-version=2.16.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e0680c88f25..f6647f7cd8b 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -369,7 +369,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.16.4 + uses: pypa/cibuildwheel@v2.16.5 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 257a7c4d031061ed28f429222fbb9f7bb4d74753 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 11:29:02 +0000 Subject: [PATCH 066/144] bump slotscheck from 0.17.1 to 0.17.3 (#8135) [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.17.1&new-version=0.17.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 02ccf9390da..1e3fb385de6 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -193,7 +193,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 866b00bd9fa..d9197e86828 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -182,7 +182,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 68000cbcc85..1976da1d1ba 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.17.1 +slotscheck==0.17.3 # via -r requirements/lint.in tomli==2.0.1 # via From 0467c9b43f094197ac2c2a871cbe2884e2ace04f Mon Sep 17 00:00:00 2001 From: "Paul J. 
Dorn" Date: Thu, 8 Feb 2024 00:09:07 +0000 Subject: [PATCH 067/144] Backport 3.10: Add tests, accidentally dropped before (#8088) (#8141) Cherry picked from commit 0016004f0e5b861d35afc56a9a59040769af3122 --- CHANGES/8088.contrib.rst | 1 + tests/test_http_parser.py | 55 ++++++++++++++++++++++++++++++++------- 2 files changed, 47 insertions(+), 9 deletions(-) create mode 100644 CHANGES/8088.contrib.rst diff --git a/CHANGES/8088.contrib.rst b/CHANGES/8088.contrib.rst new file mode 100644 index 00000000000..b3aec71bdf7 --- /dev/null +++ b/CHANGES/8088.contrib.rst @@ -0,0 +1 @@ +Enabled HTTP parser tests originally intended for 3.9.2 release -- by :user:`pajod`. diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index b931730529d..3c47231e389 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -283,9 +283,20 @@ def test_parse_headers_longline(parser: Any) -> None: parser.feed_data(text) +@pytest.fixture +def xfail_c_parser_status(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.BadStatusLine, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_status") def test_parse_unusual_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") text = b"#smol //a HTTP/1.3\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) assert len(messages) == 1 @@ -612,24 +623,37 @@ def test_headers_content_length_err_2(parser) -> None: } +@pytest.fixture +def xfail_c_parser_empty_header(request) -> None: + if not all( + (request.getfixturevalue(name) == b"") for name in ("pad1", "pad2", "hdr") + ): + return + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + ) + ) + + @pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) +@pytest.mark.usefixtures("xfail_c_parser_empty_header") def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) if pad1 == pad2 == b"" and hdr != b"": # one entry in param matrix is correct: non-empty name, not padded expectation = nullcontext() - if pad1 == pad2 == hdr == b"": - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") with expectation: parser.feed_data(text) def test_empty_header_name(parser) -> None: - if not isinstance(response, HttpResponseParserPy): + if not isinstance(parser, HttpRequestParserPy): pytest.xfail("Regression test for Py parser. 
May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): @@ -807,9 +831,20 @@ def test_http_request_upgrade(parser: Any) -> None: assert tail == b"some raw data" +@pytest.fixture +def xfail_c_parser_url(request) -> None: + if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): + return + request.node.add_marker( + pytest.mark.xfail( + reason="Regression test for Py parser. May match C behaviour later.", + raises=http_exceptions.InvalidURLError, + ) + ) + + +@pytest.mark.usefixtures("xfail_c_parser_url") def test_http_request_parser_utf8_request_line(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") messages, upgrade, tail = parser.feed_data( # note the truncated unicode sequence b"GET /P\xc3\xbcnktchen\xa0\xef\xb7 HTTP/1.1\r\n" + @@ -829,7 +864,9 @@ def test_http_request_parser_utf8_request_line(parser) -> None: assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url.path == URL("/P%C3%BCnktchen\udca0\udcef\udcb7").path + # python HTTP parser depends on Cython and CPython URL to match + # .. 
but yarl.URL("/abs") is not equal to URL.build(path="/abs"), see #6409 + assert msg.url == URL.build(path="/Pünktchen\udca0\udcef\udcb7", encoded=True) def test_http_request_parser_utf8(parser) -> None: From a7a72aefc39a83274ed250ea9f6601cbfdb6eebf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 18:58:08 +0000 Subject: [PATCH 068/144] [PR #8143/5df14cf7 backport][3.9] Add CODECOV_TOKEN (#8144) **This is a backport of PR #8143 as merged into master (5df14cf7ede67442e4934b28309674efbfaff2af).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f6647f7cd8b..9a2b6f4c353 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -252,6 +252,7 @@ jobs: }},Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() From b3e5376a1921170b635e5b93dee53f46130f955d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 19:04:23 +0000 Subject: [PATCH 069/144] [PR #8143/5df14cf7 backport][3.10] Add CODECOV_TOKEN (#8145) **This is a backport of PR #8143 as merged into master (5df14cf7ede67442e4934b28309674efbfaff2af).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 95c8a985e78..8ee338a805c 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -252,6 +252,7 @@ jobs: }},Py-${{ steps.python-install.outputs.python-version }} + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() From 1dc8a072c14d817b4b64ccf4a1a72426cec15735 Mon Sep 17 00:00:00 2001 From: Alexander Macdonald 
Date: Thu, 8 Feb 2024 14:51:33 -0800 Subject: [PATCH 070/144] rename a shadowed test and re-enable F811 to catch future cases (#8139) (#8148) (cherry picked from commit 3c0f1eb29d3512419ea65e7cdeb61ba3f3496f00) --- CHANGES/8139.contrib.rst | 1 + setup.cfg | 2 +- tests/test_resolver.py | 26 -------------------------- tests/test_urldispatch.py | 16 +++++++++------- 4 files changed, 11 insertions(+), 34 deletions(-) create mode 100644 CHANGES/8139.contrib.rst diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst new file mode 100644 index 00000000000..fd743e70f4a --- /dev/null +++ b/CHANGES/8139.contrib.rst @@ -0,0 +1 @@ +Two definitions for "test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/setup.cfg b/setup.cfg index 71dc26c9789..c291057ec7a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -87,7 +87,7 @@ zip_ok = false [flake8] extend-select = B950 # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,F811,D1,D4 +ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 6140e385cc1..1b389f3601b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -179,33 +179,7 @@ async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: async def test_close_for_threaded_resolver(loop) -> None: - resolver = ThreadedResolver(loop=loop) - await resolver.close() - - -async def test_threaded_negative_lookup_with_unknown_result() -> None: - loop = Mock() - - # If compile CPython with `--disable-ipv6` option, - # we will get an (int, bytes) tuple, instead of a Exception. 
- async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: - return [ - ( - socket.AF_INET6, - socket.SOCK_STREAM, - 6, - "", - (10, b"\x01\xbb\x00\x00\x00\x00*\x04NB\x00\x1a\x00\x00"), - ) - ] - - loop.getaddrinfo = unknown_addrinfo resolver = ThreadedResolver() - resolver._loop = loop - with patch("socket.has_ipv6", False): - res = await resolver.resolve("www.python.org") - assert len(res) == 0 - await resolver.close() diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 6a656104fd2..cbd6395e238 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1130,14 +1130,16 @@ def test_subapp_iter(app) -> None: assert list(resource) == [r1, r2] -def test_invalid_route_name(router) -> None: - with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="invalid name") - - -def test_invalid_route_name(router) -> None: +@pytest.mark.parametrize( + "route_name", + ( + "invalid name", + "class", + ), +) +def test_invalid_route_name(router, route_name: str) -> None: with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="class") # identifier + router.add_get("/", make_handler(), name=route_name) def test_frozen_router(router) -> None: From 2a5dc570d8ef27f8864dfdad04a46bb54bacdb7a Mon Sep 17 00:00:00 2001 From: Alexander Macdonald Date: Thu, 8 Feb 2024 17:38:38 -0800 Subject: [PATCH 071/144] rename a shadowed test and re-enable F811 to catch future cases (#8139) (#8147) (cherry picked from commit 3c0f1eb29d3512419ea65e7cdeb61ba3f3496f00) --- CHANGES/8139.contrib.rst | 1 + setup.cfg | 2 +- tests/test_resolver.py | 26 -------------------------- tests/test_urldispatch.py | 16 +++++++++------- 4 files changed, 11 insertions(+), 34 deletions(-) create mode 100644 CHANGES/8139.contrib.rst diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst new file mode 100644 index 00000000000..fd743e70f4a --- /dev/null +++ b/CHANGES/8139.contrib.rst @@ -0,0 +1 @@ +Two definitions for 
"test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/setup.cfg b/setup.cfg index c0515be8eeb..83da3961014 100644 --- a/setup.cfg +++ b/setup.cfg @@ -86,7 +86,7 @@ zip_ok = false [flake8] extend-select = B950 # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,F811,D1,D4 +ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 6140e385cc1..1b389f3601b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -179,33 +179,7 @@ async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: async def test_close_for_threaded_resolver(loop) -> None: - resolver = ThreadedResolver(loop=loop) - await resolver.close() - - -async def test_threaded_negative_lookup_with_unknown_result() -> None: - loop = Mock() - - # If compile CPython with `--disable-ipv6` option, - # we will get an (int, bytes) tuple, instead of a Exception. 
- async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: - return [ - ( - socket.AF_INET6, - socket.SOCK_STREAM, - 6, - "", - (10, b"\x01\xbb\x00\x00\x00\x00*\x04NB\x00\x1a\x00\x00"), - ) - ] - - loop.getaddrinfo = unknown_addrinfo resolver = ThreadedResolver() - resolver._loop = loop - with patch("socket.has_ipv6", False): - res = await resolver.resolve("www.python.org") - assert len(res) == 0 - await resolver.close() diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 7c4941f9b3c..4f3abb8bcd7 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -1130,14 +1130,16 @@ def test_subapp_iter(app) -> None: assert list(resource) == [r1, r2] -def test_invalid_route_name(router) -> None: - with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="invalid name") - - -def test_invalid_route_name(router) -> None: +@pytest.mark.parametrize( + "route_name", + ( + "invalid name", + "class", + ), +) +def test_invalid_route_name(router, route_name: str) -> None: with pytest.raises(ValueError): - router.add_get("/", make_handler(), name="class") # identifier + router.add_get("/", make_handler(), name=route_name) def test_frozen_router(router) -> None: From eb397f3cddcd784cd7b834e0876303b0876563f5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 01:44:52 +0000 Subject: [PATCH 072/144] [PR #8140/d2ea8118 backport][3.10] Use NPM clean install and upgrade node to v18 (#8150) **This is a backport of PR #8140 as merged into master (d2ea811853c2cb9305f7afe1a96265d31edb4f3b).** Co-authored-by: Steve Repsher --- .github/workflows/ci-cd.yml | 4 ++-- CHANGES/8116.contrib.rst | 1 + Makefile | 2 +- vendor/README.rst | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8116.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 8ee338a805c..d78c4b56304 100644 --- 
a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -102,13 +102,13 @@ jobs: uses: actions/cache@v3.3.2 id: cache with: - key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} + key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} path: vendor/llhttp/build - name: Setup NodeJS if: steps.cache.outputs.cache-hit != 'true' uses: actions/setup-node@v4 with: - node-version: '14' + node-version: 18 - name: Generate llhttp sources if: steps.cache.outputs.cache-hit != 'true' run: | diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst new file mode 100644 index 00000000000..c1c7f2ca3bf --- /dev/null +++ b/CHANGES/8116.contrib.rst @@ -0,0 +1 @@ +Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. diff --git a/Makefile b/Makefile index e3ec98c7ce8..bb2d437a134 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror vendor/llhttp/node_modules: vendor/llhttp/package.json - cd vendor/llhttp; npm install + cd vendor/llhttp; npm ci .llhttp-gen: vendor/llhttp/node_modules $(MAKE) -C vendor/llhttp generate diff --git a/vendor/README.rst b/vendor/README.rst index 6156f37f80e..e653068b897 100644 --- a/vendor/README.rst +++ b/vendor/README.rst @@ -12,7 +12,7 @@ newer release, add ``--remote``):: Then build ``llhttp``:: cd vendor/llhttp/ - npm install + npm ci make Then build our parser:: From 9910f5f3be6f2464e0a6a9c24c0fec07bd36daf5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 01:53:16 +0000 Subject: [PATCH 073/144] [PR #8140/d2ea8118 backport][3.9] Use NPM clean install and upgrade node to v18 (#8149) **This is a backport of PR #8140 as merged into master (d2ea811853c2cb9305f7afe1a96265d31edb4f3b).** Co-authored-by: Steve Repsher --- 
.github/workflows/ci-cd.yml | 4 ++-- CHANGES/8116.contrib.rst | 1 + Makefile | 2 +- vendor/README.rst | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8116.contrib.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 9a2b6f4c353..a0492bccd4a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -102,13 +102,13 @@ jobs: uses: actions/cache@v4.0.0 id: cache with: - key: llhttp-${{ hashFiles('vendor/llhttp/package.json', 'vendor/llhttp/src/**/*') }} + key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} path: vendor/llhttp/build - name: Setup NodeJS if: steps.cache.outputs.cache-hit != 'true' uses: actions/setup-node@v4 with: - node-version: '14' + node-version: 18 - name: Generate llhttp sources if: steps.cache.outputs.cache-hit != 'true' run: | diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst new file mode 100644 index 00000000000..c1c7f2ca3bf --- /dev/null +++ b/CHANGES/8116.contrib.rst @@ -0,0 +1 @@ +Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. 
diff --git a/Makefile b/Makefile index e3ec98c7ce8..bb2d437a134 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c cython -3 -o $@ $< -I aiohttp -Werror vendor/llhttp/node_modules: vendor/llhttp/package.json - cd vendor/llhttp; npm install + cd vendor/llhttp; npm ci .llhttp-gen: vendor/llhttp/node_modules $(MAKE) -C vendor/llhttp generate diff --git a/vendor/README.rst b/vendor/README.rst index 6156f37f80e..e653068b897 100644 --- a/vendor/README.rst +++ b/vendor/README.rst @@ -12,7 +12,7 @@ newer release, add ``--remote``):: Then build ``llhttp``:: cd vendor/llhttp/ - npm install + npm ci make Then build our parser:: From 4682c1c049a2a39c9545490a810338bdcf98f336 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 9 Feb 2024 12:33:59 +0000 Subject: [PATCH 074/144] Upgrade to llhttp 9.2 (#8146) (#8151) (cherry picked from commit 0ec65c0f4dc08d027f659256b09ae9cff10ab404) --- CHANGES/8146.feature.rst | 1 + tests/test_http_parser.py | 18 ------------------ vendor/llhttp | 2 +- 3 files changed, 2 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8146.feature.rst diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst new file mode 100644 index 00000000000..9b0cc54206e --- /dev/null +++ b/CHANGES/8146.feature.rst @@ -0,0 +1 @@ +Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 3c47231e389..d306267c8bb 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -623,25 +623,9 @@ def test_headers_content_length_err_2(parser) -> None: } -@pytest.fixture -def xfail_c_parser_empty_header(request) -> None: - if not all( - (request.getfixturevalue(name) == b"") for name in ("pad1", "pad2", "hdr") - ): - return - if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): - return - request.node.add_marker( - pytest.mark.xfail( - reason="Regression test for Py parser. 
May match C behaviour later.", - ) - ) - - @pytest.mark.parametrize("hdr", [b"", b"foo"], ids=["name-empty", "with-name"]) @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) -@pytest.mark.usefixtures("xfail_c_parser_empty_header") def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) @@ -653,8 +637,6 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - if not isinstance(parser, HttpRequestParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) diff --git a/vendor/llhttp b/vendor/llhttp index 9ab2afc85b2..533845688d1 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 9ab2afc85b2880d96a94d38afaee301c6a314049 +Subproject commit 533845688d173561b9cba33269130401add38567 From d00a32b4902ea67a425ba487b9b0f2eacb187a52 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 9 Feb 2024 12:34:10 +0000 Subject: [PATCH 075/144] Upgrade to llhttp 9.2 (#8146) (#8152) (cherry picked from commit 0ec65c0f4dc08d027f659256b09ae9cff10ab404) --- CHANGES/8146.feature.rst | 1 + tests/test_http_parser.py | 2 -- vendor/llhttp | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8146.feature.rst diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst new file mode 100644 index 00000000000..9b0cc54206e --- /dev/null +++ b/CHANGES/8146.feature.rst @@ -0,0 +1 @@ +Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. 
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index b931730529d..3fb0ab77d98 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -629,8 +629,6 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - if not isinstance(response, HttpResponseParserPy): - pytest.xfail("Regression test for Py parser. May match C behaviour later.") text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) diff --git a/vendor/llhttp b/vendor/llhttp index 9ab2afc85b2..533845688d1 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 9ab2afc85b2880d96a94d38afaee301c6a314049 +Subproject commit 533845688d173561b9cba33269130401add38567 From 5e4f0b8cdbdfda71fa039fc6b55fb51cbc735c58 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 13 Feb 2024 19:49:02 -0500 Subject: [PATCH 076/144] [3.10] Create hello.txt.gz dynamically and improve related assertions (#8136) (#8156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. 
Nick Koston --- .gitattributes | 1 - CHANGES/8136.contrib.rst | 7 ++ tests/hello.txt.gz | Bin 44 -> 0 bytes tests/test_web_sendfile_functional.py | 90 ++++++++++++++++---------- 4 files changed, 63 insertions(+), 35 deletions(-) create mode 100644 CHANGES/8136.contrib.rst delete mode 100644 tests/hello.txt.gz diff --git a/.gitattributes b/.gitattributes index 1fdd659bbc9..3e8722104e7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,2 @@ tests/data.unknown_mime_type binary -tests/hello.txt.gz binary tests/sample.* binary diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst new file mode 100644 index 00000000000..69718a4e0ab --- /dev/null +++ b/CHANGES/8136.contrib.rst @@ -0,0 +1,7 @@ +A pytest fixture ``hello_txt`` was introduced to aid +static file serving tests in +:file:`test_web_sendfile_functional.py`. It dynamically +provisions ``hello.txt`` file variants shared across the +tests in the module. + +-- by :user:`steverep` diff --git a/tests/hello.txt.gz b/tests/hello.txt.gz deleted file mode 100644 index 272fee26eb260f3b3c10b8c723e31841c5131253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44 zcmb2|=HR$ss2a+^oRON7ldo4&QNnQAOV diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index d67d67743ba..57ac0849efa 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,8 +1,9 @@ import asyncio +import gzip import pathlib import socket import zlib -from typing import Any, Iterable +from typing import Any, Iterable, Optional import pytest @@ -15,6 +16,24 @@ ssl = None # type: ignore +HELLO_AIOHTTP = b"Hello aiohttp! :-)\n" + + +@pytest.fixture(scope="module") +def hello_txt(request, tmp_path_factory) -> pathlib.Path: + """Create a temp path with hello.txt and compressed versions. + + The uncompressed text file path is returned by default. 
Alternatively, an + indirect parameter can be passed with an encoding to get a compressed path. + """ + txt = tmp_path_factory.mktemp("hello-") / "hello.txt" + hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello[None].write_bytes(HELLO_AIOHTTP) + hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + encoding = getattr(request, "param", None) + return hello[encoding] + + @pytest.fixture def loop_without_sendfile(loop): def sendfile(*args, **kwargs): @@ -201,11 +220,14 @@ async def handler(request): await client.close() -async def test_static_file_custom_content_type(aiohttp_client, sender) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" +@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +async def test_static_file_custom_content_type( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +) -> None: + """Test that custom type without encoding is returned for encoded request.""" async def handler(request): - resp = sender(filepath, chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -215,22 +237,21 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - with filepath.open("rb") as f: - content = f.read() - assert content == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() await client.close() -async def test_static_file_custom_content_type_compress(aiohttp_client, sender): - filepath = pathlib.Path(__file__).parent / "hello.txt" +async def test_static_file_custom_content_type_compress( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +): + """Test that custom type with encoding is returned for unencoded requests.""" async def handler(request): - resp = sender(filepath, 
chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -240,24 +261,26 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert b"hello aiohttp\n" == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() -async def test_static_file_with_gziped_counter_part_enable_compression( - aiohttp_client: Any, sender: Any +@pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) +async def test_static_file_with_encoding_and_enable_compression( + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + forced_compression: Optional[web.ContentCoding], ): - """Test that enable_compression does not double compress when a .gz file is also present.""" - filepath = pathlib.Path(__file__).parent / "hello.txt" + """Test that enable_compression does not double compress when an encoded file is also present.""" async def handler(request): - resp = sender(filepath) - resp.enable_compression() + resp = sender(hello_txt) + resp.enable_compression(forced_compression) return resp app = web.Application() @@ -266,35 +289,34 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert body == b"hello aiohttp\n" - assert resp.headers["Content-Type"] == "text/plain" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() +@pytest.mark.parametrize( + ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] +) async def test_static_file_with_content_encoding( - aiohttp_client: Any, sender: Any + hello_txt: 
pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str ) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" + """Test requesting static compressed files returns the correct content type and encoding.""" async def handler(request): - return sender(filepath) + return sender(hello_txt) app = web.Application() app.router.add_get("/", handler) client = await aiohttp_client(app) resp = await client.get("/") - assert 200 == resp.status - body = await resp.read() - assert b"hello aiohttp\n" == body - ct = resp.headers["CONTENT-TYPE"] - assert "text/plain" == ct - encoding = resp.headers["CONTENT-ENCODING"] - assert "gzip" == encoding + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") == expect_encoding + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() From e45da11779a1824b13f2879bb599d5d894d0e452 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 13 Feb 2024 19:49:36 -0500 Subject: [PATCH 077/144] [3.9] Create hello.txt.gz dynamically and improve related assertions (#8136) (#8157) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: J. 
Nick Koston --- .gitattributes | 1 - CHANGES/8136.contrib.rst | 7 ++ tests/hello.txt.gz | Bin 44 -> 0 bytes tests/test_web_sendfile_functional.py | 90 ++++++++++++++++---------- 4 files changed, 63 insertions(+), 35 deletions(-) create mode 100644 CHANGES/8136.contrib.rst delete mode 100644 tests/hello.txt.gz diff --git a/.gitattributes b/.gitattributes index 1fdd659bbc9..3e8722104e7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,2 @@ tests/data.unknown_mime_type binary -tests/hello.txt.gz binary tests/sample.* binary diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst new file mode 100644 index 00000000000..69718a4e0ab --- /dev/null +++ b/CHANGES/8136.contrib.rst @@ -0,0 +1,7 @@ +A pytest fixture ``hello_txt`` was introduced to aid +static file serving tests in +:file:`test_web_sendfile_functional.py`. It dynamically +provisions ``hello.txt`` file variants shared across the +tests in the module. + +-- by :user:`steverep` diff --git a/tests/hello.txt.gz b/tests/hello.txt.gz deleted file mode 100644 index 272fee26eb260f3b3c10b8c723e31841c5131253..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 44 zcmb2|=HR$ss2a+^oRON7ldo4&QNnQAOV diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index d67d67743ba..57ac0849efa 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -1,8 +1,9 @@ import asyncio +import gzip import pathlib import socket import zlib -from typing import Any, Iterable +from typing import Any, Iterable, Optional import pytest @@ -15,6 +16,24 @@ ssl = None # type: ignore +HELLO_AIOHTTP = b"Hello aiohttp! :-)\n" + + +@pytest.fixture(scope="module") +def hello_txt(request, tmp_path_factory) -> pathlib.Path: + """Create a temp path with hello.txt and compressed versions. + + The uncompressed text file path is returned by default. 
Alternatively, an + indirect parameter can be passed with an encoding to get a compressed path. + """ + txt = tmp_path_factory.mktemp("hello-") / "hello.txt" + hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello[None].write_bytes(HELLO_AIOHTTP) + hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + encoding = getattr(request, "param", None) + return hello[encoding] + + @pytest.fixture def loop_without_sendfile(loop): def sendfile(*args, **kwargs): @@ -201,11 +220,14 @@ async def handler(request): await client.close() -async def test_static_file_custom_content_type(aiohttp_client, sender) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" +@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +async def test_static_file_custom_content_type( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +) -> None: + """Test that custom type without encoding is returned for encoded request.""" async def handler(request): - resp = sender(filepath, chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -215,22 +237,21 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - with filepath.open("rb") as f: - content = f.read() - assert content == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") is None + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == hello_txt.read_bytes() resp.close() await resp.release() await client.close() -async def test_static_file_custom_content_type_compress(aiohttp_client, sender): - filepath = pathlib.Path(__file__).parent / "hello.txt" +async def test_static_file_custom_content_type_compress( + hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any +): + """Test that custom type with encoding is returned for unencoded requests.""" async def handler(request): - resp = sender(filepath, 
chunk_size=16) + resp = sender(hello_txt, chunk_size=16) resp.content_type = "application/pdf" return resp @@ -240,24 +261,26 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert b"hello aiohttp\n" == body - assert resp.headers["Content-Type"] == "application/pdf" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "application/pdf" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() -async def test_static_file_with_gziped_counter_part_enable_compression( - aiohttp_client: Any, sender: Any +@pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) +async def test_static_file_with_encoding_and_enable_compression( + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + forced_compression: Optional[web.ContentCoding], ): - """Test that enable_compression does not double compress when a .gz file is also present.""" - filepath = pathlib.Path(__file__).parent / "hello.txt" + """Test that enable_compression does not double compress when an encoded file is also present.""" async def handler(request): - resp = sender(filepath) - resp.enable_compression() + resp = sender(hello_txt) + resp.enable_compression(forced_compression) return resp app = web.Application() @@ -266,35 +289,34 @@ async def handler(request): resp = await client.get("/") assert resp.status == 200 - body = await resp.read() - assert body == b"hello aiohttp\n" - assert resp.headers["Content-Type"] == "text/plain" assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() await client.close() +@pytest.mark.parametrize( + ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] +) async def test_static_file_with_content_encoding( - aiohttp_client: Any, sender: Any + hello_txt: 
pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str ) -> None: - filepath = pathlib.Path(__file__).parent / "hello.txt.gz" + """Test requesting static compressed files returns the correct content type and encoding.""" async def handler(request): - return sender(filepath) + return sender(hello_txt) app = web.Application() app.router.add_get("/", handler) client = await aiohttp_client(app) resp = await client.get("/") - assert 200 == resp.status - body = await resp.read() - assert b"hello aiohttp\n" == body - ct = resp.headers["CONTENT-TYPE"] - assert "text/plain" == ct - encoding = resp.headers["CONTENT-ENCODING"] - assert "gzip" == encoding + assert resp.status == 200 + assert resp.headers.get("Content-Encoding") == expect_encoding + assert resp.headers["Content-Type"] == "text/plain" + assert await resp.read() == HELLO_AIOHTTP resp.close() await resp.release() From cda4a8b79e63d07474a9caf78ceb970350f0e09b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Setla?= <38295919+setla@users.noreply.github.com> Date: Wed, 14 Feb 2024 13:18:15 +0100 Subject: [PATCH 078/144] [PR #6722/fb465e15 backport][3.10] Implement granular URL error hierarchy in the HTTP client (#8158) **This is a backport of PR #6722 as merged into master (fb465e155b872f01489173d11e35f02ccbf3a940).** This patch introduces 5 granular user-facing exceptions that may occur when HTTP requests are made: * `InvalidUrlClientError` * `RedirectClientError` * `NonHttpUrlClientError` * `InvalidUrlRedirectClientError` * `NonHttpUrlRedirectClientError` Previously `ValueError` or `InvalidURL` was raised and screening out was complicated (a valid URL that redirects to invalid one raised the same error as an invalid URL). 
Ref: https://github.com/aio-libs/aiohttp/pull/6722#discussion_r1477103562 PR #6722 Resolves #2507 Resolves #2630 Resolves #3315 Co-authored-by: Sviatoslav Sydorenko (cherry picked from commit fb465e155b872f01489173d11e35f02ccbf3a940) --- CHANGES/2507.feature.rst | 1 + CHANGES/3315.feature.rst | 1 + CHANGES/6722.feature | 12 +++ CONTRIBUTORS.txt | 1 + aiohttp/__init__.py | 10 +++ aiohttp/client.py | 53 +++++++++--- aiohttp/client_exceptions.py | 54 +++++++++++-- docs/client_reference.rst | 49 ++++++++++++ tests/test_client_exceptions.py | 25 +++++- tests/test_client_functional.py | 137 +++++++++++++++++++++++++++++++- 10 files changed, 321 insertions(+), 22 deletions(-) create mode 120000 CHANGES/2507.feature.rst create mode 120000 CHANGES/3315.feature.rst create mode 100644 CHANGES/6722.feature diff --git a/CHANGES/2507.feature.rst b/CHANGES/2507.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/2507.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/CHANGES/3315.feature.rst b/CHANGES/3315.feature.rst new file mode 120000 index 00000000000..f569cd92882 --- /dev/null +++ b/CHANGES/3315.feature.rst @@ -0,0 +1 @@ +6722.feature \ No newline at end of file diff --git a/CHANGES/6722.feature b/CHANGES/6722.feature new file mode 100644 index 00000000000..1dd253a0997 --- /dev/null +++ b/CHANGES/6722.feature @@ -0,0 +1,12 @@ +Added 5 new exceptions: :py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlClientError`, :py:exc:`~aiohttp.InvalidUrlRedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlRedirectClientError` + +:py:exc:`~aiohttp.InvalidUrlRedirectClientError`, :py:exc:`~aiohttp.NonHttpUrlRedirectClientError` +are raised instead of :py:exc:`ValueError` or :py:exc:`~aiohttp.InvalidURL` when the redirect URL is invalid. 
Classes +:py:exc:`~aiohttp.InvalidUrlClientError`, :py:exc:`~aiohttp.RedirectClientError`, +:py:exc:`~aiohttp.NonHttpUrlClientError` are base for them. + +The :py:exc:`~aiohttp.InvalidURL` now exposes a ``description`` property with the text explanation of the error details. + +-- by :user:`setla` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 8df68497dbe..c7e18d955e5 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -366,5 +366,6 @@ Yuvi Panda Zainab Lawal Zeal Wierslee Zlatan Sičanica +Łukasz Setla Марк Коренберг Семён Марьясин diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 3f8b2728863..5064b043006 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -25,7 +25,12 @@ ContentTypeError, Fingerprint, InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, NamedPipeConnector, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, RequestInfo, ServerConnectionError, ServerDisconnectedError, @@ -137,6 +142,11 @@ "ContentTypeError", "Fingerprint", "InvalidURL", + "InvalidUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlClientError", + "NonHttpUrlRedirectClientError", + "RedirectClientError", "RequestInfo", "ServerConnectionError", "ServerDisconnectedError", diff --git a/aiohttp/client.py b/aiohttp/client.py index 36dbf6a7119..8d8d13f25f7 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -52,6 +52,11 @@ ConnectionTimeoutError, ContentTypeError, InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, ServerConnectionError, ServerDisconnectedError, ServerFingerprintMismatch, @@ -109,6 +114,11 @@ "ConnectionTimeoutError", "ContentTypeError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", "ServerConnectionError", "ServerDisconnectedError", "ServerFingerprintMismatch", 
@@ -168,6 +178,7 @@ class ClientTimeout: # https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) +HTTP_SCHEMA_SET = frozenset({"http", "https", ""}) _RetType = TypeVar("_RetType") _CharsetResolver = Callable[[ClientResponse, bytes], str] @@ -455,7 +466,10 @@ async def _request( try: url = self._build_url(str_or_url) except ValueError as e: - raise InvalidURL(str_or_url) from e + raise InvalidUrlClientError(str_or_url) from e + + if url.scheme not in HTTP_SCHEMA_SET: + raise NonHttpUrlClientError(url) skip_headers = set(self._skip_auto_headers) if skip_auto_headers is not None: @@ -513,6 +527,15 @@ async def _request( retry_persistent_connection = method in IDEMPOTENT_METHODS while True: url, auth_from_url = strip_auth_from_url(url) + if not url.raw_host: + # NOTE: Bail early, otherwise, causes `InvalidURL` through + # NOTE: `self._request_class()` below. + err_exc_cls = ( + InvalidUrlRedirectClientError + if redirects + else InvalidUrlClientError + ) + raise err_exc_cls(url) if auth and auth_from_url: raise ValueError( "Cannot combine AUTH argument with " @@ -670,25 +693,35 @@ async def _request( resp.release() try: - parsed_url = URL( + parsed_redirect_url = URL( r_url, encoded=not self._requote_redirect_url ) - except ValueError as e: - raise InvalidURL(r_url) from e + raise InvalidUrlRedirectClientError( + r_url, + "Server attempted redirecting to a location that does not look like a URL", + ) from e - scheme = parsed_url.scheme - if scheme not in ("http", "https", ""): + scheme = parsed_redirect_url.scheme + if scheme not in HTTP_SCHEMA_SET: resp.close() - raise ValueError("Can redirect only to http or https") + raise NonHttpUrlRedirectClientError(r_url) elif not scheme: - parsed_url = url.join(parsed_url) + parsed_redirect_url = url.join(parsed_redirect_url) - if url.origin() != parsed_url.origin(): + try: + redirect_origin = parsed_redirect_url.origin() + except ValueError as 
origin_val_err: + raise InvalidUrlRedirectClientError( + parsed_redirect_url, + "Invalid redirect URL origin", + ) from origin_val_err + + if url.origin() != redirect_origin: auth = None headers.pop(hdrs.AUTHORIZATION, None) - url = parsed_url + url = parsed_redirect_url params = {} resp.release() continue diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 60bf058e887..f15a9ee3d3e 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -2,10 +2,10 @@ import asyncio import warnings -from typing import TYPE_CHECKING, Any, Optional, Tuple, Union +from typing import TYPE_CHECKING, Optional, Tuple, Union from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders +from .typedefs import LooseHeaders, StrOrURL try: import ssl @@ -41,6 +41,11 @@ "ContentTypeError", "ClientPayloadError", "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", ) @@ -281,17 +286,52 @@ class InvalidURL(ClientError, ValueError): # Derive from ValueError for backward compatibility - def __init__(self, url: Any) -> None: + def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: # The type of url is not yarl.URL because the exception can be raised # on URL(url) call - super().__init__(url) + self._url = url + self._description = description + + if description: + super().__init__(url, description) + else: + super().__init__(url) + + @property + def url(self) -> StrOrURL: + return self._url @property - def url(self) -> Any: - return self.args[0] + def description(self) -> "str | None": + return self._description def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.url}>" + return f"<{self.__class__.__name__} {self}>" + + def __str__(self) -> str: + if self._description: + return f"{self._url} - {self._description}" + return str(self._url) + + +class 
InvalidUrlClientError(InvalidURL): + """Invalid URL client error.""" + + +class RedirectClientError(ClientError): + """Client redirect error.""" + + +class NonHttpUrlClientError(ClientError): + """Non http URL client error.""" + + +class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): + """Invalid URL redirect client error.""" + + +class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): + """Non http URL redirect client error.""" class ClientSSLError(ClientConnectorError): diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 93b3459ba7c..838aee0c7d6 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -2115,6 +2115,41 @@ All exceptions are available as members of *aiohttp* module. Invalid URL, :class:`yarl.URL` instance. + .. attribute:: description + + Invalid URL description, :class:`str` instance or :data:`None`. + +.. exception:: InvalidUrlClientError + + Base class for all errors related to client url. + + Derived from :exc:`InvalidURL` + +.. exception:: RedirectClientError + + Base class for all errors related to client redirects. + + Derived from :exc:`ClientError` + +.. exception:: NonHttpUrlClientError + + Base class for all errors related to non http client urls. + + Derived from :exc:`ClientError` + +.. exception:: InvalidUrlRedirectClientError + + Redirect URL is malformed, e.g. it does not contain host part. + + Derived from :exc:`InvalidUrlClientError` and :exc:`RedirectClientError` + +.. exception:: NonHttpUrlRedirectClientError + + Redirect URL does not contain http schema. + + Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` + + .. 
class:: ContentDisposition Represent Content-Disposition header @@ -2331,3 +2366,17 @@ Hierarchy of exceptions * :exc:`WSServerHandshakeError` * :exc:`InvalidURL` + + * :exc:`InvalidUrlClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlClientError` + + * :exc:`NonHttpUrlRedirectClientError` + + * :exc:`RedirectClientError` + + * :exc:`InvalidUrlRedirectClientError` + + * :exc:`NonHttpUrlRedirectClientError` diff --git a/tests/test_client_exceptions.py b/tests/test_client_exceptions.py index f70ba5d09a6..d863d6674a3 100644 --- a/tests/test_client_exceptions.py +++ b/tests/test_client_exceptions.py @@ -5,6 +5,7 @@ from unittest import mock import pytest +from yarl import URL from aiohttp import client, client_reqrep @@ -298,8 +299,9 @@ def test_repr(self) -> None: class TestInvalidURL: def test_ctor(self) -> None: - err = client.InvalidURL(url=":wrong:url:") + err = client.InvalidURL(url=":wrong:url:", description=":description:") assert err.url == ":wrong:url:" + assert err.description == ":description:" def test_pickle(self) -> None: err = client.InvalidURL(url=":wrong:url:") @@ -310,10 +312,27 @@ def test_pickle(self) -> None: assert err2.url == ":wrong:url:" assert err2.foo == "bar" - def test_repr(self) -> None: + def test_repr_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") + assert err.args == (":wrong:url:",) assert repr(err) == "" - def test_str(self) -> None: + def test_repr_yarl_URL(self) -> None: + err = client.InvalidURL(url=URL(":wrong:url:")) + assert repr(err) == "" + + def test_repr_with_description(self) -> None: + err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert repr(err) == "" + + def test_str_no_description(self) -> None: err = client.InvalidURL(url=":wrong:url:") assert str(err) == ":wrong:url:" + + def test_none_description(self) -> None: + err = client.InvalidURL(":wrong:url:") + assert err.description is None + + def test_str_with_description(self) -> None: + 
err = client.InvalidURL(url=":wrong:url:", description=":description:") + assert str(err) == ":wrong:url: - :description:" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 654788afa72..4d804a31ddc 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -20,7 +20,14 @@ import aiohttp from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web from aiohttp.abc import AbstractResolver -from aiohttp.client_exceptions import SocketTimeoutError, TooManyRedirects +from aiohttp.client_exceptions import ( + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + SocketTimeoutError, + TooManyRedirects, +) from aiohttp.pytest_plugin import AiohttpClient, TestClient from aiohttp.test_utils import unused_port @@ -1121,7 +1128,7 @@ async def redirect(request): app.router.add_get("/redirect", redirect) client = await aiohttp_client(app) - with pytest.raises(ValueError): + with pytest.raises(NonHttpUrlRedirectClientError): await client.get("/redirect") @@ -2497,6 +2504,132 @@ async def handler_redirect(request): await client.post("/", chunked=1024) +INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW = ( + # yarl.URL.__new__ raises ValueError + ("http://:/", "http://:/"), + ("http://example.org:non_int_port/", "http://example.org:non_int_port/"), +) + +INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN = ( + # yarl.URL.origin raises ValueError + ("http:/", "http:///"), + ("http:/example.com", "http:///example.com"), + ("http:///example.com", "http:///example.com"), +) + +NON_HTTP_URL_WITH_ERROR_MESSAGE = ( + ("call:+380123456789", r"call:\+380123456789"), + ("skype:handle", "skype:handle"), + ("slack://instance/room", "slack://instance/room"), + ("steam:code", "steam:code"), + ("twitter://handle", "twitter://handle"), + ("bluesky://profile/d:i:d", "bluesky://profile/d:i:d"), +) + + +@pytest.mark.parametrize( + ("url", "error_message_url", 
"expected_exception_class"), + ( + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, InvalidUrlClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + ), + *( + (url, message, NonHttpUrlClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_and_non_http_url( + url: Any, error_message_url: Any, expected_exception_class: Any +) -> None: + async with aiohttp.ClientSession() as http_session: + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await http_session.get(url) + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + headers = {hdrs.LOCATION: invalid_redirect_url} + + async def generate_redirecting_response(request): + return web.Response(status=301, headers=headers) + + app = web.Application() + app.router.add_get("/redirect", generate_redirecting_response) + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" 
+ ): + await client.get("/redirect") + + +@pytest.mark.parametrize( + ("invalid_redirect_url", "error_message_url", "expected_exception_class"), + ( + *( + (url, message, InvalidUrlRedirectClientError) + for (url, message) in INVALID_URL_WITH_ERROR_MESSAGE_YARL_ORIGIN + + INVALID_URL_WITH_ERROR_MESSAGE_YARL_NEW + ), + *( + (url, message, NonHttpUrlRedirectClientError) + for (url, message) in NON_HTTP_URL_WITH_ERROR_MESSAGE + ), + ), +) +async def test_invalid_redirect_url_multiple_redirects( + aiohttp_client: Any, + invalid_redirect_url: Any, + error_message_url: str, + expected_exception_class: Any, +) -> None: + app = web.Application() + + for path, location in [ + ("/redirect", "/redirect1"), + ("/redirect1", "/redirect2"), + ("/redirect2", invalid_redirect_url), + ]: + + async def generate_redirecting_response(request): + return web.Response(status=301, headers={hdrs.LOCATION: location}) + + app.router.add_get(path, generate_redirecting_response) + + client = await aiohttp_client(app) + + with pytest.raises( + expected_exception_class, match=rf"^{error_message_url}( - [A-Za-z ]+)?" + ): + await client.get("/redirect") + + @pytest.mark.parametrize( ("status", "expected_ok"), ( From 6cb21d15cce9eb63d7b94be19e63b8061f4f8a05 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Wed, 14 Feb 2024 09:02:12 -0500 Subject: [PATCH 079/144] [3.10] Add server capability to check for Brotli compressed static files (#8160) Currently server only checks if static routes have a `.gz` extension and serves them with `gzip` encoding. These changes do the same for `.br` files with `br` encoding. Brotli is prioritized over gzip if both exist and are supported by the client, as it should almost always be a smaller content length. I considered making a check for which is smaller if both exist, but figured it wouldn't be worth the extra file system call in the vast majority of cases (at least not for typical web formats). 
Users should simply use gzip if it's smaller than Brotli for any file. Resolves #8062 Co-authored-by: J. Nick Koston Co-authored-by: Sviatoslav Sydorenko (cherry picked from commit dfc92967d10eb83a8d726c02c3de90da15f8335f) --- CHANGES/8062.feature.rst | 1 + aiohttp/web_fileresponse.py | 57 ++++++++++++++++----------- aiohttp/web_response.py | 1 + docs/web_reference.rst | 5 ++- tests/test_web_sendfile.py | 8 ++-- tests/test_web_sendfile_functional.py | 40 +++++++++++++++---- 6 files changed, 76 insertions(+), 36 deletions(-) create mode 100644 CHANGES/8062.feature.rst diff --git a/CHANGES/8062.feature.rst b/CHANGES/8062.feature.rst new file mode 100644 index 00000000000..6e9814f09a0 --- /dev/null +++ b/CHANGES/8062.feature.rst @@ -0,0 +1 @@ +Added server capability to check for static files with Brotli compression via a ``.br`` extension -- by :user:`steverep`. diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 7dbe50f0a5a..5b03bcc8350 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -2,6 +2,9 @@ import mimetypes import os import pathlib +import sys +from contextlib import suppress +from types import MappingProxyType from typing import ( # noqa IO, TYPE_CHECKING, @@ -40,6 +43,14 @@ NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) +if sys.version_info < (3, 9): + mimetypes.encodings_map[".br"] = "br" + +# File extension to IANA encodings map that will be checked in the order defined. +ENCODING_EXTENSIONS = MappingProxyType( + {ext: mimetypes.encodings_map[ext] for ext in (".br", ".gz")} +) + class FileResponse(StreamResponse): """A response object can be used to send files.""" @@ -124,34 +135,36 @@ async def _precondition_failed( self.content_length = 0 return await super().prepare(request) - def _get_file_path_stat_and_gzip( - self, check_for_gzipped_file: bool - ) -> Tuple[pathlib.Path, os.stat_result, bool]: - """Return the file path, stat result, and gzip status. 
+ def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]: + """Return the file path, stat result, and encoding. + + If an uncompressed file is returned, the encoding is set to + :py:data:`None`. This method should be called from a thread executor since it calls os.stat which may block. """ - filepath = self._path - if check_for_gzipped_file: - gzip_path = filepath.with_name(filepath.name + ".gz") - try: - return gzip_path, gzip_path.stat(), True - except OSError: - # Fall through and try the non-gzipped file - pass + file_path = self._path + for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): + if file_encoding not in accept_encoding: + continue + + compressed_path = file_path.with_suffix(file_path.suffix + file_extension) + with suppress(OSError): + return compressed_path, compressed_path.stat(), file_encoding - return filepath, filepath.stat(), False + # Fallback to the uncompressed file + return file_path, file_path.stat(), None async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: loop = asyncio.get_event_loop() # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 - check_for_gzipped_file = ( - "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() - ) - filepath, st, gzip = await loop.run_in_executor( - None, self._get_file_path_stat_and_gzip, check_for_gzipped_file + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + file_path, st, file_encoding = await loop.run_in_executor( + None, self._get_file_path_stat_encoding, accept_encoding ) etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" @@ -183,12 +196,12 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter return await self._not_modified(request, etag_value, last_modified) if hdrs.CONTENT_TYPE not in self.headers: - ct, encoding = mimetypes.guess_type(str(filepath)) + ct, encoding = 
mimetypes.guess_type(str(file_path)) if not ct: ct = "application/octet-stream" should_set_ct = True else: - encoding = "gzip" if gzip else None + encoding = file_encoding should_set_ct = False status = self._status @@ -269,7 +282,7 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter self.content_type = ct # type: ignore[assignment] if encoding: self.headers[hdrs.CONTENT_ENCODING] = encoding - if gzip: + if file_encoding: self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING # Disable compression if we are already sending # a compressed file since we don't want to double @@ -293,7 +306,7 @@ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter if count == 0 or must_be_empty_body(request.method, self.status): return await super().prepare(request) - fobj = await loop.run_in_executor(None, filepath.open, "rb") + fobj = await loop.run_in_executor(None, file_path.open, "rb") if start: # be aware that start could be None or int=0 here. offset = start else: diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 40d6f01ecaa..07030305329 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -52,6 +52,7 @@ BaseClass = collections.abc.MutableMapping +# TODO(py311): Convert to StrEnum for wider use class ContentCoding(enum.Enum): # The content codings that we have support for. # diff --git a/docs/web_reference.rst b/docs/web_reference.rst index e0ebbae1851..05f8085842d 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1846,8 +1846,9 @@ Application and Router system call even if the platform supports it. This can be accomplished by by setting environment variable ``AIOHTTP_NOSENDFILE=1``. - If a gzip version of the static content exists at file path + ``.gz``, it - will be used for the response. + If a Brotli or gzip compressed version of the static content exists at + the requested path with the ``.br`` or ``.gz`` extension, it will be used + for the response. 
Brotli will be preferred over gzip if both files exist. .. warning:: diff --git a/tests/test_web_sendfile.py b/tests/test_web_sendfile.py index d472c407b7a..ae4434e9ff6 100644 --- a/tests/test_web_sendfile.py +++ b/tests/test_web_sendfile.py @@ -20,7 +20,7 @@ def test_using_gzip_if_header_present_and_file_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath file_sender = FileResponse(filepath) file_sender._path = filepath @@ -41,7 +41,7 @@ def test_gzip_if_header_not_present_and_file_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -63,7 +63,7 @@ def test_gzip_if_header_not_present_and_file_not_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 @@ -87,7 +87,7 @@ def test_gzip_if_header_present_and_file_not_available(loop) -> None: filepath = mock.create_autospec(Path, spec_set=True) filepath.name = "logo.png" - filepath.with_name.return_value = gz_filepath + filepath.with_suffix.return_value = gz_filepath filepath.stat.return_value.st_size = 1024 filepath.stat.return_value.st_mtime_ns = 1603733507222449291 diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 57ac0849efa..93645094ef7 100644 --- a/tests/test_web_sendfile_functional.py +++ b/tests/test_web_sendfile_functional.py @@ -10,6 +10,11 @@ import aiohttp from aiohttp import web +try: + import brotlicffi as brotli 
+except ImportError: + import brotli + try: import ssl except ImportError: @@ -27,9 +32,14 @@ def hello_txt(request, tmp_path_factory) -> pathlib.Path: indirect parameter can be passed with an encoding to get a compressed path. """ txt = tmp_path_factory.mktemp("hello-") / "hello.txt" - hello = {None: txt, "gzip": txt.with_suffix(f"{txt.suffix}.gz")} + hello = { + None: txt, + "gzip": txt.with_suffix(f"{txt.suffix}.gz"), + "br": txt.with_suffix(f"{txt.suffix}.br"), + } hello[None].write_bytes(HELLO_AIOHTTP) hello["gzip"].write_bytes(gzip.compress(HELLO_AIOHTTP)) + hello["br"].write_bytes(brotli.compress(HELLO_AIOHTTP)) encoding = getattr(request, "param", None) return hello[encoding] @@ -220,7 +230,7 @@ async def handler(request): await client.close() -@pytest.mark.parametrize("hello_txt", ["gzip"], indirect=True) +@pytest.mark.parametrize("hello_txt", ["gzip", "br"], indirect=True) async def test_static_file_custom_content_type( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any ) -> None: @@ -245,8 +255,16 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) async def test_static_file_custom_content_type_compress( - hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any + hello_txt: pathlib.Path, + aiohttp_client: Any, + sender: Any, + accept_encoding: str, + expect_encoding: str, ): """Test that custom type with encoding is returned for unencoded requests.""" @@ -259,9 +277,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "application/pdf" assert await resp.read() == HELLO_AIOHTTP 
resp.close() @@ -269,11 +287,17 @@ async def handler(request): await client.close() +@pytest.mark.parametrize( + ("accept_encoding", "expect_encoding"), + [("gzip, deflate", "gzip"), ("gzip, deflate, br", "br")], +) @pytest.mark.parametrize("forced_compression", [None, web.ContentCoding.gzip]) async def test_static_file_with_encoding_and_enable_compression( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, + accept_encoding: str, + expect_encoding: str, forced_compression: Optional[web.ContentCoding], ): """Test that enable_compression does not double compress when an encoded file is also present.""" @@ -287,9 +311,9 @@ async def handler(request): app.router.add_get("/", handler) client = await aiohttp_client(app) - resp = await client.get("/") + resp = await client.get("/", headers={"Accept-Encoding": accept_encoding}) assert resp.status == 200 - assert resp.headers.get("Content-Encoding") == "gzip" + assert resp.headers.get("Content-Encoding") == expect_encoding assert resp.headers["Content-Type"] == "text/plain" assert await resp.read() == HELLO_AIOHTTP resp.close() @@ -298,7 +322,7 @@ async def handler(request): @pytest.mark.parametrize( - ("hello_txt", "expect_encoding"), [["gzip"] * 2], indirect=["hello_txt"] + ("hello_txt", "expect_encoding"), [["gzip"] * 2, ["br"] * 2], indirect=["hello_txt"] ) async def test_static_file_with_content_encoding( hello_txt: pathlib.Path, aiohttp_client: Any, sender: Any, expect_encoding: str From 87e06976a25050117f38d6e9a64c4202d814f388 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Fri, 16 Feb 2024 05:42:37 +0100 Subject: [PATCH 080/144] =?UTF-8?q?[PR=20#8089/dc38630b=20backport][3.9]?= =?UTF-8?q?=20=F0=9F=92=85=20Propagate=20error=20causes=20via=20asyncio=20?= =?UTF-8?q?protocols=20(#8162)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit **This is a backport of PR #8089 as merged into master (dc38630b168a169139974617d75e176530c91696).** This is supposed to unify setting exceptions on the future objects, allowing to also attach their causes whenever available. It'll make possible for the end-users to see more detailed tracebacks. It's also supposed to help with tracking down what's happening with #4581. PR #8089 Co-Authored-By: J. Nick Koston Co-Authored-By: Sam Bull (cherry picked from commit dc38630b168a169139974617d75e176530c91696) --- CHANGES/8089.bugfix.rst | 3 ++ aiohttp/_http_parser.pyx | 12 ++++--- aiohttp/base_protocol.py | 7 +++- aiohttp/client_proto.py | 66 ++++++++++++++++++++++++++---------- aiohttp/client_reqrep.py | 34 ++++++++++++------- aiohttp/helpers.py | 36 ++++++++++++++++++-- aiohttp/http_parser.py | 27 ++++++++++----- aiohttp/http_websocket.py | 4 +-- aiohttp/streams.py | 32 +++++++++++++---- aiohttp/web_protocol.py | 4 +-- aiohttp/web_request.py | 3 +- aiohttp/web_ws.py | 4 +-- tests/test_base_protocol.py | 4 +-- tests/test_client_request.py | 6 ++-- tests/test_http_parser.py | 1 + 15 files changed, 177 insertions(+), 66 deletions(-) create mode 100644 CHANGES/8089.bugfix.rst diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst new file mode 100644 index 00000000000..7f47448478d --- /dev/null +++ b/CHANGES/8089.bugfix.rst @@ -0,0 +1,3 @@ +The asynchronous internals now set the underlying causes +when assigning exceptions to the future objects +-- by :user:`webknjaz`. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 3f28fbdab43..7ea9b32ca55 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -19,7 +19,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD from yarl import URL as _URL from aiohttp import hdrs -from aiohttp.helpers import DEBUG +from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, @@ -763,11 +763,13 @@ cdef int cb_on_body(cparser.llhttp_t* parser, cdef bytes body = at[:length] try: pyparser._payload.feed_data(body, length) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if pyparser._payload_exception is not None: - pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) + reraised_exc = pyparser._payload_exception(str(underlying_exc)) + + set_exception(pyparser._payload, reraised_exc, underlying_exc) + pyparser._payload_error = 1 return -1 else: diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index 4c9f0a752e3..dc1f24f99cd 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -76,7 +77,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if exc is None: waiter.set_result(None) else: - waiter.set_exception(exc) + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) async def _drain_helper(self) -> None: if not self.connected: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index ca99808080d..723f5aae5f4 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -9,8 +9,14 @@ ServerDisconnectedError, ServerTimeoutError, ) -from .helpers import BaseTimerContext, status_code_must_be_empty_body +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, 
+ set_exception, + status_code_must_be_empty_body, +) from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import HttpProcessingError from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader @@ -73,28 +79,50 @@ def is_connected(self) -> bool: def connection_lost(self, exc: Optional[BaseException]) -> None: self._drop_timeout() + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + if self._payload_parser is not None: - with suppress(Exception): + with suppress(Exception): # FIXME: log this somehow? self._payload_parser.feed_eof() uncompleted = None if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception as e: + except Exception as underlying_exc: if self._payload is not None: - exc = ClientPayloadError("Response payload is not completed") - exc.__cause__ = e - self._payload.set_exception(exc) + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) # assigns self._should_close to True as side effect, # we do it anyway below - self.set_exception(exc) + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) self._should_close = True self._parser = None @@ -102,7 +130,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: self._payload_parser = None self._reading_paused = False - super().connection_lost(exc) + super().connection_lost(reraised_exc) def eof_received(self) -> None: # should call parser.feed_eof() most likely @@ -116,10 +144,14 @@ def resume_reading(self) -> None: super().resume_reading() self._reschedule_timeout() - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._should_close = True self._drop_timeout() - super().set_exception(exc) + super().set_exception(exc, exc_cause) def set_parser(self, parser: Any, payload: Any) -> None: # TODO: actual types are: @@ -196,7 +228,7 @@ def _on_read_timeout(self) -> None: exc = ServerTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: - self._payload.set_exception(exc) + set_exception(self._payload, exc) def data_received(self, data: bytes) -> None: self._reschedule_timeout() @@ -222,14 +254,14 @@ def 
data_received(self, data: bytes) -> None: # parse http messages try: messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: + except BaseException as underlying_exc: if self.transport is not None: # connection.release() could be called BEFORE # data_received(), the transport is already # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(exc) + self.set_exception(HttpProcessingError(), underlying_exc) return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e0de951a33a..afe719da16e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -50,6 +50,7 @@ netrc_from_env, noop, reify, + set_exception, set_result, ) from .http import ( @@ -630,20 +631,29 @@ async def write_bytes( for chunk in self.body: await writer.write(chunk) # type: ignore[arg-type] - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - protocol.set_exception(exc) - else: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url + except OSError as underlying_exc: + reraised_exc = underlying_exc + + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) + + set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: await writer.write_eof() - except Exception as exc: - protocol.set_exception(exc) + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + f"Failed to send bytes into the underlying connection {conn !s}", + ), + underlying_exc, + ) else: await writer.write_eof() protocol.start_timeout() @@ -1086,7 +1096,7 @@ def _cleanup_writer(self) -> 
None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) + set_exception(content, ClientConnectionError("Connection closed")) self._released = True async def wait_for_close(self) -> None: diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index a5c762ed795..284033b7a04 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -810,9 +810,39 @@ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: fut.set_result(result) -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) +_EXC_SENTINEL = BaseException() + + +class ErrorableProtocol(Protocol): + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = ..., + ) -> None: + ... # pragma: no cover + + +def set_exception( + fut: "asyncio.Future[_T] | ErrorableProtocol", + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, +) -> None: + """Set future exception. + + If the future is marked as complete, this function is a no-op. + + :param exc_cause: An exception that is a direct cause of ``exc``. + Only set if provided. 
+ """ + if asyncio.isfuture(fut) and fut.done(): + return + + exc_is_sentinel = exc_cause is _EXC_SENTINEL + exc_causes_itself = exc is exc_cause + if not exc_is_sentinel and not exc_causes_itself: + exc.__cause__ = exc_cause + + fut.set_exception(exc) @functools.total_ordering diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1877f558308..1301f025810 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -28,10 +28,12 @@ from .base_protocol import BaseProtocol from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor from .helpers import ( + _EXC_SENTINEL, DEBUG, NO_EXTENSIONS, BaseTimerContext, method_must_be_empty_body, + set_exception, status_code_must_be_empty_body, ) from .http_exceptions import ( @@ -446,13 +448,16 @@ def get_content_length() -> Optional[int]: assert self._payload_parser is not None try: eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) eof = True data = b"" @@ -834,7 +839,7 @@ def feed_data( exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) - self.payload.set_exception(exc) + set_exception(self.payload, exc) raise exc size = int(bytes(size_b), 16) @@ -939,8 +944,12 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, 
exc, exc_cause) def feed_data(self, chunk: bytes, size: int) -> None: if not size: diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b63453f99e5..39f2e4a5c15 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,7 +25,7 @@ from .base_protocol import BaseProtocol from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS +from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue __all__ = ( @@ -314,7 +314,7 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return self._feed_data(data) except Exception as exc: self._exc = exc - self.queue.set_exception(exc) + set_exception(self.queue, exc) return True, b"" def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 3e4c355b5cb..b9b9c3fd96f 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -14,7 +14,13 @@ ) from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) from .log import internal_logger __all__ = ( @@ -146,19 +152,23 @@ def get_read_buffer_limits(self) -> Tuple[int, int]: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._exception = exc self._eof_callbacks.clear() waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) waiter = self._eof_waiter if waiter is not None: self._eof_waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def on_eof(self, callback: Callable[[], None]) -> None: if self._eof: @@ -513,7 +523,11 @@ def __repr__(self) -> str: def exception(self) -> 
Optional[BaseException]: return None - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: pass def on_eof(self, callback: Callable[[], None]) -> None: @@ -588,14 +602,18 @@ def at_eof(self) -> bool: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._eof = True self._exception = exc waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def feed_data(self, data: _T, size: int = 0) -> None: self._size += size diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index ec5856a0a22..f083b13eb0f 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -26,7 +26,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout +from .helpers import ceil_timeout, set_exception from .http import ( HttpProcessingError, HttpRequestParser, @@ -565,7 +565,7 @@ async def start(self) -> None: self.log_debug("Uncompleted request.") self.close() - payload.set_exception(PayloadAccessError()) + set_exception(payload, PayloadAccessError()) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 61fc831b032..781713e5985 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -48,6 +48,7 @@ parse_http_date, reify, sentinel, + set_exception, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -814,7 +815,7 @@ async def _prepare_hook(self, response: StreamResponse) -> None: return def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) + 
set_exception(self._payload, exc) class Request(BaseRequest): diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 783377716f5..d20a26ca470 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_result +from .helpers import call_later, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -526,4 +526,4 @@ async def __anext__(self) -> WSMessage: def _cancel(self, exc: BaseException) -> None: if self._reader is not None: - self._reader.set_exception(exc) + set_exception(self._reader, exc) diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py index b26011095e9..72c8c7c6b63 100644 --- a/tests/test_base_protocol.py +++ b/tests/test_base_protocol.py @@ -186,9 +186,9 @@ async def test_lost_drain_waited_exception() -> None: assert pr._drain_waiter is not None exc = RuntimeError() pr.connection_lost(exc) - with pytest.raises(RuntimeError) as cm: + with pytest.raises(ConnectionError, match=r"^Connection lost$") as cm: await t - assert cm.value is exc + assert cm.value.__cause__ is exc assert pr._drain_waiter is None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c54e1828e34..6084f685405 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,6 +14,7 @@ import aiohttp from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, ClientResponse, @@ -1096,9 +1097,8 @@ async def throw_exc(): # assert connection.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] - assert isinstance(outer_exc, ValueError) - assert inner_exc is outer_exc - assert inner_exc is outer_exc + assert isinstance(outer_exc, ClientConnectionError) + assert outer_exc.__cause__ is inner_exc await req.close() diff --git 
a/tests/test_http_parser.py b/tests/test_http_parser.py index 3fb0ab77d98..a37a08632d7 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -280,6 +280,7 @@ def test_parse_headers_longline(parser: Any) -> None: header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192 text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n" with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)): + # FIXME: `LineTooLong` doesn't seem to actually be happening parser.feed_data(text) From d4322e72f1eafc0a3c9513b966b1993fd73001dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sviatoslav=20Sydorenko=20=28=D0=A1=D0=B2=D1=8F=D1=82=D0=BE?= =?UTF-8?q?=D1=81=D0=BB=D0=B0=D0=B2=20=D0=A1=D0=B8=D0=B4=D0=BE=D1=80=D0=B5?= =?UTF-8?q?=D0=BD=D0=BA=D0=BE=29?= Date: Fri, 16 Feb 2024 05:45:10 +0100 Subject: [PATCH 081/144] =?UTF-8?q?[PR=20#8089/dc38630b=20backport][3.10]?= =?UTF-8?q?=20=F0=9F=92=85=20Propagate=20error=20causes=20via=20asyncio=20?= =?UTF-8?q?protocols=20(#8161)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **This is a backport of PR #8089 as merged into master (dc38630b168a169139974617d75e176530c91696).** This is supposed to unify setting exceptions on the future objects, allowing to also attach their causes whenever available. It'll make possible for the end-users to see more detailed tracebacks. It's also supposed to help with tracking down what's happening with #4581. PR #8089 Co-Authored-By: J. 
Nick Koston Co-Authored-By: Sam Bull (cherry picked from commit dc38630b168a169139974617d75e176530c91696) --- CHANGES/8089.bugfix.rst | 3 ++ aiohttp/_http_parser.pyx | 12 ++++--- aiohttp/base_protocol.py | 7 +++- aiohttp/client_proto.py | 66 ++++++++++++++++++++++++++---------- aiohttp/client_reqrep.py | 34 ++++++++++++------- aiohttp/helpers.py | 36 ++++++++++++++++++-- aiohttp/http_parser.py | 27 ++++++++++----- aiohttp/http_websocket.py | 4 +-- aiohttp/streams.py | 32 +++++++++++++---- aiohttp/web_protocol.py | 4 +-- aiohttp/web_request.py | 3 +- aiohttp/web_ws.py | 4 +-- tests/test_base_protocol.py | 4 +-- tests/test_client_request.py | 6 ++-- tests/test_http_parser.py | 1 + 15 files changed, 177 insertions(+), 66 deletions(-) create mode 100644 CHANGES/8089.bugfix.rst diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst new file mode 100644 index 00000000000..7f47448478d --- /dev/null +++ b/CHANGES/8089.bugfix.rst @@ -0,0 +1,3 @@ +The asynchronous internals now set the underlying causes +when assigning exceptions to the future objects +-- by :user:`webknjaz`. 
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx index 3f28fbdab43..7ea9b32ca55 100644 --- a/aiohttp/_http_parser.pyx +++ b/aiohttp/_http_parser.pyx @@ -19,7 +19,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD from yarl import URL as _URL from aiohttp import hdrs -from aiohttp.helpers import DEBUG +from aiohttp.helpers import DEBUG, set_exception from .http_exceptions import ( BadHttpMessage, @@ -763,11 +763,13 @@ cdef int cb_on_body(cparser.llhttp_t* parser, cdef bytes body = at[:length] try: pyparser._payload.feed_data(body, length) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if pyparser._payload_exception is not None: - pyparser._payload.set_exception(pyparser._payload_exception(str(exc))) - else: - pyparser._payload.set_exception(exc) + reraised_exc = pyparser._payload_exception(str(underlying_exc)) + + set_exception(pyparser._payload, reraised_exc, underlying_exc) + pyparser._payload_error = 1 return -1 else: diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index 4c9f0a752e3..dc1f24f99cd 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,6 +1,7 @@ import asyncio from typing import Optional, cast +from .helpers import set_exception from .tcp_helpers import tcp_nodelay @@ -76,7 +77,11 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: if exc is None: waiter.set_result(None) else: - waiter.set_exception(exc) + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) async def _drain_helper(self) -> None: if not self.connected: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index 1ab8acd27b0..28e9d3cd9e5 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -9,8 +9,14 @@ ServerDisconnectedError, SocketTimeoutError, ) -from .helpers import BaseTimerContext, status_code_must_be_empty_body +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, 
+ set_exception, + status_code_must_be_empty_body, +) from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import HttpProcessingError from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader @@ -73,28 +79,50 @@ def is_connected(self) -> bool: def connection_lost(self, exc: Optional[BaseException]) -> None: self._drop_timeout() + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + if self._payload_parser is not None: - with suppress(Exception): + with suppress(Exception): # FIXME: log this somehow? self._payload_parser.feed_eof() uncompleted = None if self._parser is not None: try: uncompleted = self._parser.feed_eof() - except Exception as e: + except Exception as underlying_exc: if self._payload is not None: - exc = ClientPayloadError("Response payload is not completed") - exc.__cause__ = e - self._payload.set_exception(exc) + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) if not self.is_eof(): - if isinstance(exc, OSError): - exc = ClientOSError(*exc.args) - if exc is None: - exc = ServerDisconnectedError(uncompleted) + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) # assigns self._should_close to True as side effect, # we do it anyway below - self.set_exception(exc) + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) self._should_close = True self._parser = None @@ -102,7 +130,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: self._payload_parser = None self._reading_paused = False - super().connection_lost(exc) + super().connection_lost(reraised_exc) def eof_received(self) -> None: # should call parser.feed_eof() most likely @@ -116,10 +144,14 @@ def resume_reading(self) -> None: super().resume_reading() self._reschedule_timeout() - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._should_close = True self._drop_timeout() - super().set_exception(exc) + super().set_exception(exc, exc_cause) def set_parser(self, parser: Any, payload: Any) -> None: # TODO: actual types are: @@ -196,7 +228,7 @@ def _on_read_timeout(self) -> None: exc = SocketTimeoutError("Timeout on reading data from socket") self.set_exception(exc) if self._payload is not None: - self._payload.set_exception(exc) + set_exception(self._payload, exc) def data_received(self, data: bytes) -> None: self._reschedule_timeout() @@ -222,14 +254,14 @@ def 
data_received(self, data: bytes) -> None: # parse http messages try: messages, upgraded, tail = self._parser.feed_data(data) - except BaseException as exc: + except BaseException as underlying_exc: if self.transport is not None: # connection.release() could be called BEFORE # data_received(), the transport is already # closed in this case self.transport.close() # should_close is True after the call - self.set_exception(exc) + self.set_exception(HttpProcessingError(), underlying_exc) return self._upgraded = upgraded diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index e0de951a33a..afe719da16e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -50,6 +50,7 @@ netrc_from_env, noop, reify, + set_exception, set_result, ) from .http import ( @@ -630,20 +631,29 @@ async def write_bytes( for chunk in self.body: await writer.write(chunk) # type: ignore[arg-type] - except OSError as exc: - if exc.errno is None and isinstance(exc, asyncio.TimeoutError): - protocol.set_exception(exc) - else: - new_exc = ClientOSError( - exc.errno, "Can not write request body for %s" % self.url + except OSError as underlying_exc: + reraised_exc = underlying_exc + + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", ) - new_exc.__context__ = exc - new_exc.__cause__ = exc - protocol.set_exception(new_exc) + + set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: await writer.write_eof() - except Exception as exc: - protocol.set_exception(exc) + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + f"Failed to send bytes into the underlying connection {conn !s}", + ), + underlying_exc, + ) else: await writer.write_eof() protocol.start_timeout() @@ -1086,7 +1096,7 @@ def _cleanup_writer(self) -> 
None: def _notify_content(self) -> None: content = self.content if content and content.exception() is None: - content.set_exception(ClientConnectionError("Connection closed")) + set_exception(content, ClientConnectionError("Connection closed")) self._released = True async def wait_for_close(self) -> None: diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index a5c762ed795..284033b7a04 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -810,9 +810,39 @@ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None: fut.set_result(result) -def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None: - if not fut.done(): - fut.set_exception(exc) +_EXC_SENTINEL = BaseException() + + +class ErrorableProtocol(Protocol): + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = ..., + ) -> None: + ... # pragma: no cover + + +def set_exception( + fut: "asyncio.Future[_T] | ErrorableProtocol", + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, +) -> None: + """Set future exception. + + If the future is marked as complete, this function is a no-op. + + :param exc_cause: An exception that is a direct cause of ``exc``. + Only set if provided. 
+ """ + if asyncio.isfuture(fut) and fut.done(): + return + + exc_is_sentinel = exc_cause is _EXC_SENTINEL + exc_causes_itself = exc is exc_cause + if not exc_is_sentinel and not exc_causes_itself: + exc.__cause__ = exc_cause + + fut.set_exception(exc) @functools.total_ordering diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1877f558308..1301f025810 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -28,10 +28,12 @@ from .base_protocol import BaseProtocol from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor from .helpers import ( + _EXC_SENTINEL, DEBUG, NO_EXTENSIONS, BaseTimerContext, method_must_be_empty_body, + set_exception, status_code_must_be_empty_body, ) from .http_exceptions import ( @@ -446,13 +448,16 @@ def get_content_length() -> Optional[int]: assert self._payload_parser is not None try: eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) - except BaseException as exc: + except BaseException as underlying_exc: + reraised_exc = underlying_exc if self.payload_exception is not None: - self._payload_parser.payload.set_exception( - self.payload_exception(str(exc)) - ) - else: - self._payload_parser.payload.set_exception(exc) + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) eof = True data = b"" @@ -834,7 +839,7 @@ def feed_data( exc = TransferEncodingError( chunk[:pos].decode("ascii", "surrogateescape") ) - self.payload.set_exception(exc) + set_exception(self.payload, exc) raise exc size = int(bytes(size_b), 16) @@ -939,8 +944,12 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: else: self.decompressor = ZLibDecompressor(encoding=encoding) - def set_exception(self, exc: BaseException) -> None: - self.out.set_exception(exc) + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, 
exc, exc_cause) def feed_data(self, chunk: bytes, size: int) -> None: if not size: diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index b63453f99e5..39f2e4a5c15 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -25,7 +25,7 @@ from .base_protocol import BaseProtocol from .compression_utils import ZLibCompressor, ZLibDecompressor -from .helpers import NO_EXTENSIONS +from .helpers import NO_EXTENSIONS, set_exception from .streams import DataQueue __all__ = ( @@ -314,7 +314,7 @@ def feed_data(self, data: bytes) -> Tuple[bool, bytes]: return self._feed_data(data) except Exception as exc: self._exc = exc - self.queue.set_exception(exc) + set_exception(self.queue, exc) return True, b"" def _feed_data(self, data: bytes) -> Tuple[bool, bytes]: diff --git a/aiohttp/streams.py b/aiohttp/streams.py index 3e4c355b5cb..b9b9c3fd96f 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -14,7 +14,13 @@ ) from .base_protocol import BaseProtocol -from .helpers import BaseTimerContext, TimerNoop, set_exception, set_result +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) from .log import internal_logger __all__ = ( @@ -146,19 +152,23 @@ def get_read_buffer_limits(self) -> Tuple[int, int]: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._exception = exc self._eof_callbacks.clear() waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) waiter = self._eof_waiter if waiter is not None: self._eof_waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def on_eof(self, callback: Callable[[], None]) -> None: if self._eof: @@ -513,7 +523,11 @@ def __repr__(self) -> str: def exception(self) -> 
Optional[BaseException]: return None - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: pass def on_eof(self, callback: Callable[[], None]) -> None: @@ -588,14 +602,18 @@ def at_eof(self) -> bool: def exception(self) -> Optional[BaseException]: return self._exception - def set_exception(self, exc: BaseException) -> None: + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: self._eof = True self._exception = exc waiter = self._waiter if waiter is not None: self._waiter = None - set_exception(waiter, exc) + set_exception(waiter, exc, exc_cause) def feed_data(self, data: _T, size: int = 0) -> None: self._size += size diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index ec5856a0a22..f083b13eb0f 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -26,7 +26,7 @@ from .abc import AbstractAccessLogger, AbstractStreamWriter from .base_protocol import BaseProtocol -from .helpers import ceil_timeout +from .helpers import ceil_timeout, set_exception from .http import ( HttpProcessingError, HttpRequestParser, @@ -565,7 +565,7 @@ async def start(self) -> None: self.log_debug("Uncompleted request.") self.close() - payload.set_exception(PayloadAccessError()) + set_exception(payload, PayloadAccessError()) except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 61fc831b032..781713e5985 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -48,6 +48,7 @@ parse_http_date, reify, sentinel, + set_exception, ) from .http_parser import RawRequestMessage from .http_writer import HttpVersion @@ -814,7 +815,7 @@ async def _prepare_hook(self, response: StreamResponse) -> None: return def _cancel(self, exc: BaseException) -> None: - self._payload.set_exception(exc) + 
set_exception(self._payload, exc) class Request(BaseRequest): diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 783377716f5..d20a26ca470 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -11,7 +11,7 @@ from . import hdrs from .abc import AbstractStreamWriter -from .helpers import call_later, set_result +from .helpers import call_later, set_exception, set_result from .http import ( WS_CLOSED_MESSAGE, WS_CLOSING_MESSAGE, @@ -526,4 +526,4 @@ async def __anext__(self) -> WSMessage: def _cancel(self, exc: BaseException) -> None: if self._reader is not None: - self._reader.set_exception(exc) + set_exception(self._reader, exc) diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py index b26011095e9..72c8c7c6b63 100644 --- a/tests/test_base_protocol.py +++ b/tests/test_base_protocol.py @@ -186,9 +186,9 @@ async def test_lost_drain_waited_exception() -> None: assert pr._drain_waiter is not None exc = RuntimeError() pr.connection_lost(exc) - with pytest.raises(RuntimeError) as cm: + with pytest.raises(ConnectionError, match=r"^Connection lost$") as cm: await t - assert cm.value is exc + assert cm.value.__cause__ is exc assert pr._drain_waiter is None diff --git a/tests/test_client_request.py b/tests/test_client_request.py index c54e1828e34..6084f685405 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -14,6 +14,7 @@ import aiohttp from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( ClientRequest, ClientResponse, @@ -1096,9 +1097,8 @@ async def throw_exc(): # assert connection.close.called assert conn.protocol.set_exception.called outer_exc = conn.protocol.set_exception.call_args[0][0] - assert isinstance(outer_exc, ValueError) - assert inner_exc is outer_exc - assert inner_exc is outer_exc + assert isinstance(outer_exc, ClientConnectionError) + assert outer_exc.__cause__ is inner_exc await req.close() diff --git 
a/tests/test_http_parser.py b/tests/test_http_parser.py index d306267c8bb..da7f1182b3a 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -280,6 +280,7 @@ def test_parse_headers_longline(parser: Any) -> None: header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192 text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n" with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)): + # FIXME: `LineTooLong` doesn't seem to actually be happening parser.feed_data(text) From 6b5cdef308a7b59b837b6936288531063256139d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 Feb 2024 11:38:37 +0000 Subject: [PATCH 082/144] Bump pip-tools from 7.3.0 to 7.4.0 (#8169) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pip-tools](https://github.com/jazzband/pip-tools) from 7.3.0 to 7.4.0.
Release notes

Sourced from pip-tools's releases.

7.4.0

Features:

Bug Fixes:

  • Fix for src-files not being used when specified in a config file (#2015). Thanks @​csalerno-asml
  • Fix ignorance of inverted CLI options in config for pip-sync (#1989). Thanks @​chrysle
  • Filter out origin ireqs for extra requirements before writing output annotations (#2011). Thanks @​chrysle
  • Make BacktrackingResolver ignore extras when dropping existing constraints (#1984). Thanks @​chludwig-haufe
  • Display pyproject.toml's metatada parsing errors in verbose mode (#1979). Thanks @​szobov

Other Changes:

  • Add mention of pip-compile-multi in Other useful tools README section (#1986). Thanks @​peterdemin
Changelog

Sourced from pip-tools's changelog.

v7.4.0

Features:

Bug Fixes:

  • Fix for src-files not being used when specified in a config file (#2015). Thanks @​csalerno-asml
  • Fix ignorance of inverted CLI options in config for pip-sync (#1989). Thanks @​chrysle
  • Filter out origin ireqs for extra requirements before writing output annotations (#2011). Thanks @​chrysle
  • Make BacktrackingResolver ignore extras when dropping existing constraints (#1984). Thanks @​chludwig-haufe
  • Display pyproject.toml's metatada parsing errors in verbose mode (#1979). Thanks @​szobov

Other Changes:

  • Add mention of pip-compile-multi in Other useful tools README section (#1986). Thanks @​peterdemin
Commits
  • 1397bfa Merge pull request #2043 from jazzband/dependabot/pip/docs/jinja2-3.1.3
  • 355a04e Bump jinja2 from 3.1.2 to 3.1.3 in /docs
  • c8f2988 Merge pull request #2055 from atugushev/fix-failing-test-on-pip-24
  • 598845a Fix failing test on pip-24.0
  • a8688d7 Merge pull request #2029 from jazzband/pre-commit-ci-update-config
  • 39cbff8 [pre-commit.ci] auto fixes from pre-commit.com hooks
  • 644ac8a [pre-commit.ci] pre-commit autoupdate
  • d673c8e Merge pull request #2015 from csalerno-asml/fix-2006
  • e216ad5 rm submodule
  • 64d1de3 comments addressed
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip-tools&package-manager=pip&previous-version=7.3.0&new-version=7.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 16 ++++++++++------ requirements/dev.txt | 12 ++++++++---- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1e3fb385de6..2d6e55bca9a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -32,7 +32,7 @@ blockdiag==2.0.1 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.9.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests @@ -94,7 +94,9 @@ idna==3.3 imagesize==1.3.0 # via sphinx importlib-metadata==7.0.0 - # via sphinx + # via + # build + # sphinx importlib-resources==6.1.1 # via towncrier incremental==22.10.0 @@ -126,13 +128,11 @@ packaging==21.2 # gunicorn # pytest # sphinx -pep517==0.12.0 - # via build pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.0 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv @@ -160,6 +160,10 @@ pyjwt==2.3.0 # pyjwt pyparsing==2.4.7 # via packaging +pyproject-hooks==1.0.0 + # via + # build + # pip-tools pytest==7.4.4 # via # -r requirements/lint.in @@ -227,8 +231,8 @@ tomli==2.0.1 # cherry-picker # coverage # mypy - # pep517 # pip-tools + # pyproject-hooks # pytest # slotscheck # towncrier diff --git a/requirements/dev.txt b/requirements/dev.txt index d9197e86828..51aed8b65b8 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -30,7 +30,7 @@ blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==0.10.0 +build==1.0.3 # via pip-tools certifi==2023.7.22 # via requests @@ -90,7 +90,9 @@ idna==3.4 imagesize==1.4.1 # via sphinx importlib-metadata==7.0.0 - # via sphinx + # via + # build + # sphinx 
importlib-resources==6.1.1 # via towncrier incremental==22.10.0 @@ -125,7 +127,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.3.0 +pip-tools==7.4.0 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv @@ -150,7 +152,9 @@ pyjwt==2.8.0 # gidgethub # pyjwt pyproject-hooks==1.0.0 - # via build + # via + # build + # pip-tools pytest==7.4.4 # via # -r requirements/lint.in From e74a4a02893c2b0a90ddcd8935ac9cdb946f2f1c Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:04:27 -0600 Subject: [PATCH 083/144] [PR #8163/006fbe03 backport][3.9] Avoid creating a task to do DNS resolution if there is no throttle (#8172) Co-authored-by: J. Nick Koston Fixes #123'). --> --- CHANGES/8163.bugfix.rst | 5 +++++ aiohttp/connector.py | 50 +++++++++++++++++++++++++++++------------ tests/test_connector.py | 6 +++++ 3 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8163.bugfix.rst diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst new file mode 100644 index 00000000000..8bfb10260c6 --- /dev/null +++ b/CHANGES/8163.bugfix.rst @@ -0,0 +1,5 @@ +Improved the DNS resolution performance on cache hit +-- by :user:`bdraco`. + +This is achieved by avoiding an :mod:`asyncio` task creation +in this case. 
diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 3b9841dd094..f95ebe84c66 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -825,6 +825,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None ) -> List[Dict[str, Any]]: + """Resolve host and return list of addresses.""" if is_ip_address(host): return [ { @@ -852,8 +853,7 @@ async def _resolve_host( return res key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + if key in self._cached_hosts and not self._cached_hosts.expired(key): # get result early, before any await (#4014) result = self._cached_hosts.next_addrs(key) @@ -862,6 +862,39 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + # + # If multiple connectors are resolving the same host, we wait + # for the first one to resolve and then use the result for all of them. + # We use a throttle event to ensure that we only resolve the host once + # and then use the result for all the waiters. + # + # In this case we need to create a task to ensure that we can shield + # the task from cancellation as cancelling this lookup should not cancel + # the underlying lookup or else the cancel event will get broadcast to + # all the waiters across all connections. 
+ # + resolved_host_task = asyncio.create_task( + self._resolve_host_with_throttle(key, host, port, traces) + ) + try: + return await asyncio.shield(resolved_host_task) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + resolved_host_task.add_done_callback(drop_exception) + raise + + async def _resolve_host_with_throttle( + self, + key: Tuple[str, int], + host: str, + port: int, + traces: Optional[List["Trace"]], + ) -> List[Dict[str, Any]]: + """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) event = self._throttle_dns_events[key] @@ -1163,22 +1196,11 @@ async def _create_direct_connection( host = host.rstrip(".") + "." port = req.port assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) try: # Cancelling this lookup should not cancel the underlying lookup # or else the cancel event will get broadcast to all the waiters # across all connections. 
- hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise + hosts = await self._resolve_host(host, port, traces=traces) except OSError as exc: if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/test_connector.py b/tests/test_connector.py index 142abab3c15..02e48bc108b 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -767,6 +767,7 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: loop.create_task(conn._resolve_host("localhost", 8080)) loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -778,6 +779,9 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non r1 = loop.create_task(conn._resolve_host("localhost", 8080)) r2 = loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) assert r1.exception() == e assert r2.exception() == e @@ -792,6 +796,7 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop.create_task(conn._resolve_host("localhost", 8080)) f = loop.create_task(conn._resolve_host("localhost", 8080)) + await asyncio.sleep(0) await asyncio.sleep(0) await conn.close() @@ -956,6 +961,7 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) await asyncio.sleep(0) + await asyncio.sleep(0) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) From 
88d224f7c53dd1ff410a5dccada6a778039adba7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 16:04:39 -0600 Subject: [PATCH 084/144] [PR #8163/006fbe03 backport][3.10] Avoid creating a task to do DNS resolution if there is no throttle (#8173) Co-authored-by: J. Nick Koston --- CHANGES/8163.bugfix.rst | 5 +++++ aiohttp/connector.py | 50 +++++++++++++++++++++++++++++------------ tests/test_connector.py | 6 +++++ 3 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 CHANGES/8163.bugfix.rst diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst new file mode 100644 index 00000000000..8bfb10260c6 --- /dev/null +++ b/CHANGES/8163.bugfix.rst @@ -0,0 +1,5 @@ +Improved the DNS resolution performance on cache hit +-- by :user:`bdraco`. + +This is achieved by avoiding an :mod:`asyncio` task creation +in this case. diff --git a/aiohttp/connector.py b/aiohttp/connector.py index d0954355244..64c678d4b78 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -835,6 +835,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None ) -> List[Dict[str, Any]]: + """Resolve host and return list of addresses.""" if is_ip_address(host): return [ { @@ -862,8 +863,7 @@ async def _resolve_host( return res key = (host, port) - - if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)): + if key in self._cached_hosts and not self._cached_hosts.expired(key): # get result early, before any await (#4014) result = self._cached_hosts.next_addrs(key) @@ -872,6 +872,39 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result + # + # If multiple connectors are resolving the same host, we wait + # for the first one to resolve and then use the result for all of them. + # We use a throttle event to ensure that we only resolve the host once + # and then use the result for all the waiters. 
+ # + # In this case we need to create a task to ensure that we can shield + # the task from cancellation as cancelling this lookup should not cancel + # the underlying lookup or else the cancel event will get broadcast to + # all the waiters across all connections. + # + resolved_host_task = asyncio.create_task( + self._resolve_host_with_throttle(key, host, port, traces) + ) + try: + return await asyncio.shield(resolved_host_task) + except asyncio.CancelledError: + + def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + with suppress(Exception, asyncio.CancelledError): + fut.result() + + resolved_host_task.add_done_callback(drop_exception) + raise + + async def _resolve_host_with_throttle( + self, + key: Tuple[str, int], + host: str, + port: int, + traces: Optional[List["Trace"]], + ) -> List[Dict[str, Any]]: + """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) event = self._throttle_dns_events[key] @@ -1224,22 +1257,11 @@ async def _create_direct_connection( host = host.rstrip(".") + "." port = req.port assert port is not None - host_resolved = asyncio.ensure_future( - self._resolve_host(host, port, traces=traces), loop=self._loop - ) try: # Cancelling this lookup should not cancel the underlying lookup # or else the cancel event will get broadcast to all the waiters # across all connections. 
- hosts = await asyncio.shield(host_resolved) - except asyncio.CancelledError: - - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: - with suppress(Exception, asyncio.CancelledError): - fut.result() - - host_resolved.add_done_callback(drop_exception) - raise + hosts = await self._resolve_host(host, port, traces=traces) except OSError as exc: if exc.errno is None and isinstance(exc, asyncio.TimeoutError): raise diff --git a/tests/test_connector.py b/tests/test_connector.py index fe027df896c..58f6c6a116d 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -1015,6 +1015,7 @@ async def test_tcp_connector_dns_throttle_requests(loop, dns_response) -> None: loop.create_task(conn._resolve_host("localhost", 8080)) loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) m_resolver().resolve.assert_called_once_with("localhost", 8080, family=0) @@ -1026,6 +1027,9 @@ async def test_tcp_connector_dns_throttle_requests_exception_spread(loop) -> Non r1 = loop.create_task(conn._resolve_host("localhost", 8080)) r2 = loop.create_task(conn._resolve_host("localhost", 8080)) await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) + await asyncio.sleep(0) assert r1.exception() == e assert r2.exception() == e @@ -1040,6 +1044,7 @@ async def test_tcp_connector_dns_throttle_requests_cancelled_when_close( loop.create_task(conn._resolve_host("localhost", 8080)) f = loop.create_task(conn._resolve_host("localhost", 8080)) + await asyncio.sleep(0) await asyncio.sleep(0) await conn.close() @@ -1204,6 +1209,7 @@ async def test_tcp_connector_dns_tracing_throttle_requests(loop, dns_response) - loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) loop.create_task(conn._resolve_host("localhost", 8080, traces=traces)) await asyncio.sleep(0) + await asyncio.sleep(0) on_dns_cache_hit.assert_called_once_with( session, trace_config_ctx, aiohttp.TraceDnsCacheHitParams("localhost") ) 
From 46675be68c0f60a414ca97fd72d7951624c99cb2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 12:25:47 +0000 Subject: [PATCH 085/144] Bump coverage from 7.4.1 to 7.4.2 (#8177) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.1 to 7.4.2.
Changelog

Sourced from coverage's changelog.

Version 7.4.2 — 2024-02-20

  • Fix: setting COVERAGE_CORE=sysmon no longer errors on 3.11 and lower, thanks Hugo van Kemenade <pull 1747_>_. It now issues a warning that sys.monitoring is not available and falls back to the default core instead.

.. _pull 1747: nedbat/coveragepy#1747

.. _changes_7-4-1:

Commits
  • 5d69334 test: if a test fails randomly, let it retry with @​flaky
  • 65d686c docs: sample HTML for 7.4.2
  • 026dca7 docs: prep for 7.4.2
  • a7d1022 build: some Makefile targets only make sense from the release steps
  • d365814 chore: make upgrade
  • c56c3a2 fix: issue a warning if we can't use sysmon as requested.
  • 8b0e039 fix: only use "sysmon" core when available (Python 3.12+) (#1747)
  • 575a44c build: temporarily pin to 3.13.0a3 for Windows (#1751)
  • 628c1c5 fix: avoid a dict-changed-size error
  • 8412054 chore: make upgrade
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.1&new-version=7.4.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 2d6e55bca9a..18a8baa1193 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 51aed8b65b8..04f527639ad 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 29021aecde1..8793b24283e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.1 +coverage==7.4.2 # via # -r requirements/test.in # pytest-cov From 73d15c481395701ab73b474076f9eff838c4cb8c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 11:08:46 +0000 Subject: [PATCH 086/144] Bump typing-extensions from 4.9.0 to 4.10.0 (#8189) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.9.0 to 4.10.0.
Release notes

Sourced from typing-extensions's releases.

4.10.0rc1

Release 4.10.0rc1 (February 17, 2024)

  • Add support for PEP 728, supporting the closed keyword argument and the special __extra_items__ key for TypedDict. Patch by Zixuan James Li.
  • Add support for PEP 742, adding typing_extensions.TypeIs. Patch by Jelle Zijlstra.
  • Drop runtime error when a read-only TypedDict item overrides a mutable one. Type checkers should still flag this as an error. Patch by Jelle Zijlstra.
  • Speedup issubclass() checks against simple runtime-checkable protocols by around 6% (backporting python/cpython#112717, by Alex Waygood).
  • Fix a regression in the implementation of protocols where typing.Protocol classes that were not marked as @runtime_checkable would be unnecessarily introspected, potentially causing exceptions to be raised if the protocol had problematic members. Patch by Alex Waygood, backporting python/cpython#113401.
Changelog

Sourced from typing-extensions's changelog.

Release 4.10.0 (February 24, 2024)

This feature release adds support for PEP 728 (TypedDict with extra items) and PEP 742 (TypeIs).

There are no changes since 4.10.0rc1.

Release 4.10.0rc1 (February 17, 2024)

  • Add support for PEP 728, supporting the closed keyword argument and the special __extra_items__ key for TypedDict. Patch by Zixuan James Li.
  • Add support for PEP 742, adding typing_extensions.TypeIs. Patch by Jelle Zijlstra.
  • Drop runtime error when a read-only TypedDict item overrides a mutable one. Type checkers should still flag this as an error. Patch by Jelle Zijlstra.
  • Speedup issubclass() checks against simple runtime-checkable protocols by around 6% (backporting python/cpython#112717, by Alex Waygood).
  • Fix a regression in the implementation of protocols where typing.Protocol classes that were not marked as @runtime_checkable would be unnecessarily introspected, potentially causing exceptions to be raised if the protocol had problematic members. Patch by Alex Waygood, backporting python/cpython#113401.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.9.0&new-version=4.10.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index e10f80a9cca..6bc448debd4 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 18a8baa1193..794783b353c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -246,7 +246,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/cython.txt b/requirements/cython.txt index 201da88c351..28456c88452 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.8 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 04f527639ad..eee4c68f3ae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -232,7 +232,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # 
aioredis diff --git a/requirements/lint.txt b/requirements/lint.txt index 1976da1d1ba..85af8510de9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 8793b24283e..26ff36ee802 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -111,7 +111,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index 8ea8d0d4d08..a7d80f5a0fd 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.9.0 +typing-extensions==4.10.0 # via -r requirements/typing-extensions.in From a75ed655d7200102bb0129445469a6f5b6fc7f45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 11:32:28 +0000 Subject: [PATCH 087/144] Bump python-on-whales from 0.68.0 to 0.69.0 (#8168) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.68.0 to 0.69.0.
Release notes

Sourced from python-on-whales's releases.

v0.69.0

What's Changed

Since this is a big release, I'll only put here the commits which have an impact on end users and remove internal refactoring.

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.68.0...v0.69.0

Commits
  • 4ed016d Bump version to 0.69.0
  • 6e5a7a9 Add support for podman --env-host flag when creating containers (#551)
  • ffb4e47 :sparkles: add platforms parameter to buildx.create (#541)
  • ae158ac Fixed pydantic error on service model (#548)
  • d8420f9 Add tty and interactive flags to container create and start (#545)
  • f9d129b Various tidyup for container cli_wrapper.py (#538)
  • 7775d2a Add some rules to ensure an healthy contributor experience
  • f2eb701 Tidy up ReloadableObjectFromJson._fetch_inspect_result_json() to return a loa...
  • 26302ea Move most of the remaining tests to use docker_client or ctr_client fixture (...
  • 3c3d0cb Convert test_network.py, not passing with podman yet (#533)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.68.0&new-version=0.69.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 794783b353c..280821c8f54 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index eee4c68f3ae..7906a47e8de 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index 26ff36ee802..00b974fe385 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.68.0 +python-on-whales==0.69.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From f92a3e39d95c40a0d95f3571f72674b06c98bec3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 10:31:41 +0000 Subject: [PATCH 088/144] Bump actions/cache from 4.0.0 to 4.0.1 (#8195) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.0.0 to 4.0.1.
Release notes

Sourced from actions/cache's releases.

v4.0.1

What's Changed

New Contributors

Full Changelog: https://github.com/actions/cache/compare/v4...v4.0.1

Changelog

Sourced from actions/cache's changelog.

4.0.1

  • Updated isGhes check
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.0.0&new-version=4.0.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a0492bccd4a..765b4cb79dd 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -184,7 +184,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v4.0.0 + uses: actions/cache@v4.0.1 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 01531454b673be006fbe32f924e16b89652b427d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 22:22:29 +0000 Subject: [PATCH 089/144] [PR #8197/0e91eb0b backport][3.9] fix base_url param documentation (#8198) **This is a backport of PR #8197 as merged into master (0e91eb0bd94298e501084f245df44cc1772c49e5).** Co-authored-by: Alexis B <43278953+alexis974@users.noreply.github.com> --- CHANGES/8197.doc | 1 + docs/client_reference.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8197.doc diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc new file mode 100644 index 00000000000..ba4117768e8 --- /dev/null +++ b/CHANGES/8197.doc @@ -0,0 +1 @@ +Fixed false behavior of base_url param for ClientSession in client 
documentation -- by :user:`alexis974`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index d0348d70ca8..fdf66e1bef0 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -59,8 +59,8 @@ The client session supports the context manager protocol for self closing. :param base_url: Base part of the URL (optional) If set, it allows to skip the base part (https://docs.aiohttp.org) in - request calls. If base_url includes a path (as in - https://docs.aiohttp.org/en/stable) the path is ignored/discarded. + request calls. It must not include a path (as in + https://docs.aiohttp.org/en/stable). .. versionadded:: 3.8 From e229a3a149bc1f7bfb6bfcd2bb095642cd1a9d3d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 22:34:25 +0000 Subject: [PATCH 090/144] [PR #8197/0e91eb0b backport][3.10] fix base_url param documentation (#8199) **This is a backport of PR #8197 as merged into master (0e91eb0bd94298e501084f245df44cc1772c49e5).** Co-authored-by: Alexis B <43278953+alexis974@users.noreply.github.com> --- CHANGES/8197.doc | 1 + docs/client_reference.rst | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8197.doc diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc new file mode 100644 index 00000000000..ba4117768e8 --- /dev/null +++ b/CHANGES/8197.doc @@ -0,0 +1 @@ +Fixed false behavior of base_url param for ClientSession in client documentation -- by :user:`alexis974`. diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 838aee0c7d6..d25f381e03c 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -59,8 +59,8 @@ The client session supports the context manager protocol for self closing. :param base_url: Base part of the URL (optional) If set, it allows to skip the base part (https://docs.aiohttp.org) in - request calls. 
If base_url includes a path (as in - https://docs.aiohttp.org/en/stable) the path is ignored/discarded. + request calls. It must not include a path (as in + https://docs.aiohttp.org/en/stable). .. versionadded:: 3.8 From 0854d33b46351c380eb06cdff4eb37f4a00b79be Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 11:27:21 +0000 Subject: [PATCH 091/144] Bump cython from 3.0.8 to 3.0.9 (#8208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cython](https://github.com/cython/cython) from 3.0.8 to 3.0.9.
Changelog

Sourced from cython's changelog.

3.0.9 (2024-03-05)

Features added

  • Assigning const values to non-const variables now issues a warning. (Github issue :issue:5639)

  • Using noexcept on a function returning Python objects now issues a warning. (Github issue :issue:5661)

  • Some C-API usage was updated for the upcoming CPython 3.13. Patches by Victor Stinner et al. (Github issues :issue:6003, :issue:6020)

  • The deprecated Py_UNICODE type is no longer used, unless required by user code. (Github issue :issue:5982)

  • std::string.replace() declarations were added to libcpp.string. Patch by Kieran Geary. (Github issue :issue:6037)

Bugs fixed

  • Cython generates incorrect (but harmless) self-casts when directly calling final methods of subtypes. Lacking a better solution, the errors that recent gcc versions produce have been silenced for the time being. Original patch by Michał Górny. (Github issue :issue:2747)

  • Unused variable warnings about clineno were fixed when C lines in tracebacks are disabled. (Github issue :issue:6035)

  • Subclass deallocation of extern classes could crash if the base class uses GC. Original patch by Jason Fried. (Github issue :issue:5971)

  • Type checks for Python memoryview could use an invalid C function. Patch by Xenia Lu. (Github issue :issue:5988)

  • Calling final fused functions could generate invalid C code. (Github issue :issue:5989)

  • Declaring extern enums multiple times could generate invalid C code. (Github issue :issue:5905)

  • pyximport used relative paths incorrectly. Patch by Stefano Rivera. (Github issue :issue:5957)

  • Running Cython with globbing characters ([]*?) in the module search path could fail. Patch by eewanco. (Github issue :issue:5942)

... (truncated)

Commits
  • 31d4058 Update release date.
  • 9d9189b Build: Update cibuildwheel version.
  • 3cb9358 Update changelog.
  • 5689c9f Disable gcc warnings/errors about wrong self casts in final function calls (G...
  • 51d1ad0 Add fixing of warning in 3.0.9 to CHANGES.rst (#6041)
  • ec2a67d Update changelog.
  • 2da931d Disable GCC warnings/errors about wrong self casts in final function calls.
  • ba47941 Prepare release of 3.0.9.
  • bc683a4 Add missing replace functions to C++ std::string declarations (GH-6037)
  • a0a5e8d Update changelog.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.8&new-version=3.0.9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 280821c8f54..75645baa2d3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.8 +cython==3.0.9 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 28456c88452..f7fecfb3b95 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.8 +cython==3.0.9 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in From c427e6d8c30ba462d6a30129802bd4acde233216 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 9 Mar 2024 12:53:20 +0000 Subject: [PATCH 092/144] [PR #8211/7725f5a2 backport][3.9] Fix type annotations on MultipartWriter.append (#8214) **This is a backport of PR #8211 as merged into master (7725f5a22f4ca64dfb01478d640763910b036192).** Co-authored-by: Daniel Golding --- CHANGES/7741.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 8 ++++---- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7741.bugfix.rst diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst new file mode 100644 index 00000000000..9134e920c14 --- /dev/null +++ b/CHANGES/7741.bugfix.rst @@ -0,0 +1,3 @@ +Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, +:meth:`aiohttp.MultipartWriter.append_json` and +:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` diff 
--git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index be4a3ad48d4..ab889685fc8 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -80,6 +80,7 @@ Damien Nadé Dan King Dan Xu Daniel García +Daniel Golding Daniel Grossmann-Kavanagh Daniel Nelson Danny Song diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 602a6b67457..4471dd4bb7e 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -25,7 +25,7 @@ ) from urllib.parse import parse_qsl, unquote, urlencode -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping +from multidict import CIMultiDict, CIMultiDictProxy from .compression_utils import ZLibCompressor, ZLibDecompressor from .hdrs import ( @@ -791,7 +791,7 @@ def _boundary_value(self) -> str: def boundary(self) -> str: return self._boundary.decode("ascii") - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: if headers is None: headers = CIMultiDict() @@ -839,7 +839,7 @@ def append_payload(self, payload: Payload) -> Payload: return payload def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None + self, obj: Any, headers: Optional[Mapping[str, str]] = None ) -> Payload: """Helper to append JSON part.""" if headers is None: @@ -850,7 +850,7 @@ def append_json( def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, + headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) From 5fdb5c6ec13195b4bc63fbf120101c43970131e3 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 10 Mar 2024 12:25:42 +0000 Subject: [PATCH 093/144] [PR #8211/7725f5a2 backport][3.10] Fix type annotations on MultipartWriter.append (#8215) **This is a backport of PR #8211 as merged into master 
(7725f5a22f4ca64dfb01478d640763910b036192).** Co-authored-by: Daniel Golding --- CHANGES/7741.bugfix.rst | 3 +++ CONTRIBUTORS.txt | 1 + aiohttp/multipart.py | 8 ++++---- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 CHANGES/7741.bugfix.rst diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst new file mode 100644 index 00000000000..9134e920c14 --- /dev/null +++ b/CHANGES/7741.bugfix.rst @@ -0,0 +1,3 @@ +Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, +:meth:`aiohttp.MultipartWriter.append_json` and +:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index c7e18d955e5..6b53b5ad9c9 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -80,6 +80,7 @@ Damien Nadé Dan King Dan Xu Daniel García +Daniel Golding Daniel Grossmann-Kavanagh Daniel Nelson Danny Song diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 602a6b67457..4471dd4bb7e 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -25,7 +25,7 @@ ) from urllib.parse import parse_qsl, unquote, urlencode -from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping +from multidict import CIMultiDict, CIMultiDictProxy from .compression_utils import ZLibCompressor, ZLibDecompressor from .hdrs import ( @@ -791,7 +791,7 @@ def _boundary_value(self) -> str: def boundary(self) -> str: return self._boundary.decode("ascii") - def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload: + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: if headers is None: headers = CIMultiDict() @@ -839,7 +839,7 @@ def append_payload(self, payload: Payload) -> Payload: return payload def append_json( - self, obj: Any, headers: Optional[MultiMapping[str]] = None + self, obj: Any, headers: Optional[Mapping[str, str]] = None ) -> Payload: """Helper to append JSON part.""" if headers is None: @@ -850,7 +850,7 @@ def 
append_json( def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[MultiMapping[str]] = None, + headers: Optional[Mapping[str, str]] = None, ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) From 7fefba199329758c303a59997beeb7298183f381 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:01:37 +0000 Subject: [PATCH 094/144] Bump python-on-whales from 0.69.0 to 0.70.0 (#8220) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.69.0 to 0.70.0.
Release notes

Sourced from python-on-whales's releases.

v0.70.0

This update contains (hopefully) all fixes for the breaking changes introduced in docker compose v2.24.7. If you still encounter issues, file an issue here and downgrade docker compose to v2.24.6 in the meantime.

What's Changed

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.69.0...v0.70.0

Commits
  • 6573bf3 Bump version to 0.70.0
  • 68efb49 :bug: Fix parsing of compose file configs with compose v2.24.7 (#563)
  • e45f0cd feat(service): Add docker service ls --filter option (#561)
  • bf1c60d :sparkles: docker.compose.down() can take str as service arg (#562)
  • 20aebfb Fixpodman.image.exists (#558)
  • 4433a02 Add detach_keys argument for start and exec (#554)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.69.0&new-version=0.70.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 75645baa2d3..4c861d3674c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index 7906a47e8de..78fb9dcd256 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index 00b974fe385..520b4193417 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.12.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.69.0 +python-on-whales==0.70.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From 4a3bfa6183fc613033261970bb88d298387ad57d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:12:23 +0000 Subject: [PATCH 095/144] Bump mypy from 1.8.0 to 1.9.0 (#8222) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [mypy](https://github.com/python/mypy) from 1.8.0 to 1.9.0.
Changelog

Sourced from mypy's changelog.

Mypy Release Notes

Mypy 1.9

We’ve just uploaded mypy 1.9 to the Python Package Index (PyPI). Mypy is a static type checker for Python. This release includes new features, performance improvements and bug fixes. You can install it as follows:

python3 -m pip install -U mypy

You can read the full documentation for this release on Read the Docs.

Breaking Changes

Because the version of typeshed we use in mypy 1.9 doesn't support 3.7, neither does mypy 1.9. (Jared Hance, PR 16883)

We are planning to enable local partial types (enabled via the --local-partial-types flag) later this year by default. This change was announced years ago, but now it's finally happening. This is a major backward-incompatible change, so we'll probably include it as part of the upcoming mypy 2.0 release. This makes daemon and non-daemon mypy runs have the same behavior by default.

Local partial types can also be enabled in the mypy config file:

local_partial_types = True

We are looking at providing a tool to make it easier to migrate projects to use --local-partial-types, but it's not yet clear whether this is practical. The migration usually involves adding some explicit type annotations to module-level and class-level variables.

Basic Support for Type Parameter Defaults (PEP 696)

This release contains new experimental support for type parameter defaults (PEP 696). Please try it out! This feature was contributed by Marc Mueller.

Since this feature will be officially introduced in the next Python feature release (3.13), you will need to import TypeVar, ParamSpec or TypeVarTuple from typing_extensions to use defaults for now.

This example adapted from the PEP defines a default for BotT:

from typing import Generic
from typing_extensions import TypeVar

class Bot: ...

BotT = TypeVar("BotT", bound=Bot, default=Bot)

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=mypy&package-manager=pip&previous-version=1.8.0&new-version=1.9.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4c861d3674c..93b8f57862a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -114,7 +114,7 @@ multidict==6.0.5 # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 78fb9dcd256..87b696f50b2 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 85af8510de9..37aeb9da0f9 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -22,7 +22,7 @@ identify==2.5.26 # via pre-commit iniconfig==2.0.0 # via pytest -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy diff --git a/requirements/test.txt b/requirements/test.txt index 520b4193417..57bd4431402 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.8.0 ; implementation_name == "cpython" +mypy==1.9.0 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy From 2ae85757644c1962522fe7f86466b7d50dbbd32f Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 10:59:43 +0000 Subject: [PATCH 096/144] Bump pypa/cibuildwheel from 2.16.5 to 2.17.0 (#8226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.5 to 2.17.0.
Release notes

Sourced from pypa/cibuildwheel's releases.

v2.17.0

  • 🌟 Adds the ability to inherit configuration in TOML overrides. This makes certain configurations much simpler. If you're overriding an option like before-build or environment, and you just want to add an extra command or environment variable, you can just append (or prepend) to the previous config. See the docs for more information. (#1730)
  • 🌟 Adds official support for native arm64 macOS GitHub runners. To use them, just specify macos-14 as an os of your job in your workflow file. You can also keep macos-13 in your build matrix to build x86_64. Check out the new GitHub Actions example config.
  • ✨ You no longer need to specify --platform to run cibuildwheel locally! Instead it will detect your platform automatically. This was a safety feature, no longer necessary. (#1727)
  • 🛠 Removed setuptools and wheel pinned versions. This only affects old-style projects without a pyproject.toml, projects with pyproject.toml are already getting fresh versions of their build-system.requires installed into an isolated environment. (#1725)
  • 🛠 Improve how the GitHub Action passes arguments (#1757)
  • 🛠 Remove a system-wide install of pipx in the GitHub Action (#1745)
  • 🐛 No longer will cibuildwheel override the PIP_CONSTRAINT environment variable when using the build frontend. Instead it will be extended. (#1675)
  • 🐛 Fix a bug where building and testing both x86_64 and arm64 wheels on the same runner caused the wrong architectures in the test environment (#1750)
  • 🐛 Fix a bug that prevented testing a CPython 3.8 wheel targeting macOS 11+ on x86_64 (#1768)
  • 📚 Moved the docs onto the official PyPA domain - they're now available at https://cibuildwheel.pypa.io . (#1775)
  • 📚 Docs and examples improvements (#1762, #1734)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v2.17.0

11 March 2024

  • 🌟 Adds the ability to inherit configuration in TOML overrides. This makes certain configurations much simpler. If you're overriding an option like before-build or environment, and you just want to add an extra command or environment variable, you can just append (or prepend) to the previous config. See the docs for more information. (#1730)
  • 🌟 Adds official support for native arm64 macOS GitHub runners. To use them, just specify macos-14 as an os of your job in your workflow file. You can also keep macos-13 in your build matrix to build x86_64. Check out the new GitHub Actions example config.
  • ✨ You no longer need to specify --platform to run cibuildwheel locally! Instead it will detect your platform automatically. This was a safety feature, no longer necessary. (#1727)
  • 🛠 Removed setuptools and wheel pinned versions. This only affects old-style projects without a pyproject.toml, projects with pyproject.toml are already getting fresh versions of their build-system.requires installed into an isolated environment. (#1725)
  • 🛠 Improve how the GitHub Action passes arguments (#1757)
  • 🛠 Remove a system-wide install of pipx in the GitHub Action (#1745)
  • 🐛 No longer will cibuildwheel override the PIP_CONSTRAINT environment variable when using the build frontend. Instead it will be extended. (#1675)
  • 🐛 Fix a bug where building and testing both x86_64 and arm64 wheels on the same runner caused the wrong architectures in the test environment (#1750)
  • 🐛 Fix a bug that prevented testing a CPython 3.8 wheel targeting macOS 11+ on x86_64 (#1768)
  • 📚 Moved the docs onto the official PyPA domain - they're now available at https://cibuildwheel.pypa.io . (#1775)
  • 📚 Docs and examples improvements (#1762, #1734)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.5&new-version=2.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 765b4cb79dd..153422eb80a 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -370,7 +370,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v2.16.5 + uses: pypa/cibuildwheel@v2.17.0 env: CIBW_ARCHS_MACOS: x86_64 arm64 universal2 - uses: actions/upload-artifact@v3 From 30e5c04f3d252e2b46e58f918473d2c84db30c42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 11:04:49 +0000 Subject: [PATCH 097/144] Bump coverage from 7.4.2 to 7.4.4 (#8230) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.2 to 7.4.4.
Changelog

Sourced from coverage's changelog.

Version 7.4.4 — 2024-03-14

  • Fix: in some cases, even with [run] relative_files=True, a data file could be created with absolute path names. When combined with other relative data files, it was random whether the absolute file names would be made relative or not. If they weren't, then a file would be listed twice in reports, as detailed in issue 1752_. This is now fixed: absolute file names are always made relative when combining. Thanks to Bruno Rodrigues dos Santos for support.

  • Fix: the last case of a match/case statement had an incorrect message if the branch was missed. It said the pattern never matched, when actually the branch is missed if the last case always matched.

  • Fix: clicking a line number in the HTML report now positions more accurately.

  • Fix: the ``report:format`` setting was defined as a boolean, but should be a string. Thanks, `Tanaydin Sirin <pull 1754_>`_. It is also now documented on the :ref:`configuration page <config_report_format>`.

.. _issue 1752: nedbat/coveragepy#1752 .. _pull 1754: nedbat/coveragepy#1754

.. _changes_7-4-3:

Version 7.4.3 — 2024-02-23

  • Fix: in some cases, coverage could fail with a RuntimeError: "Set changed size during iteration." This is now fixed, closing issue 1733_.

.. _issue 1733: nedbat/coveragepy#1733

.. _changes_7-4-2:

Commits
  • bc5e2d7 docs: sample HTML for 7.4.4
  • 9b0008b docs: prep for 7.4.4
  • a536161 docs: thanks, Bruno Rodrigues dos Santos
  • e06e4f9 chore: make doc_upgrade
  • f30818e chore: make upgrade
  • 1b19799 fix: ensure absolute paths are relative when combined #1752
  • 1ef020d build: more cheats for convenient URLs
  • 3d57a07 docs: document the report:format setting
  • 8e30221 fix: correct the type of report:format in config.py (#1754)
  • 6289be8 refactor: use dataclasses, no namedtuple
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=coverage&package-manager=pip&previous-version=7.4.2&new-version=7.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 93b8f57862a..38d52814b5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -54,7 +54,7 @@ click==8.0.3 # towncrier # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index 87b696f50b2..a31efef499e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -52,7 +52,7 @@ click==8.1.6 # towncrier # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 57bd4431402..c646c2e6c40 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -30,7 +30,7 @@ click==8.1.6 # via # typer # wait-for-it -coverage==7.4.2 +coverage==7.4.4 # via # -r requirements/test.in # pytest-cov From a459459bbf502eafd12d66903327376fa15dcf0f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 10:24:21 +0000 Subject: [PATCH 098/144] Bump dependabot/fetch-metadata from 1 to 2 (#8237) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dependabot/fetch-metadata](https://github.com/dependabot/fetch-metadata) from 1 to 2.
Release notes

Sourced from dependabot/fetch-metadata's releases.

v2.0.0 - Switch to node20

What's Changed

Full Changelog: https://github.com/dependabot/fetch-metadata/compare/v1.7.0...v2.0.0

v1.7.0

What's Changed

New Contributors

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dependabot/fetch-metadata&package-manager=github_actions&previous-version=1&new-version=2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/auto-merge.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 0b3e9c7c846..56575750fe1 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -12,7 +12,7 @@ jobs: steps: - name: Dependabot metadata id: metadata - uses: dependabot/fetch-metadata@v1 + uses: dependabot/fetch-metadata@v2 with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Enable auto-merge for Dependabot PRs From e2afa78815f8e430d8e944e4f826b1d7bf42c599 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:06:00 +0000 Subject: [PATCH 099/144] Bump pytest-mock from 3.12.0 to 3.14.0 (#8238) Bumps [pytest-mock](https://github.com/pytest-dev/pytest-mock) from 3.12.0 to 3.14.0.
Release notes

Sourced from pytest-mock's releases.

v3.14.0

  • #415: MockType and AsyncMockType can be imported from pytest_mock for type annotation purposes.
  • #420: Fixed a regression which would cause mocker.patch.object to not being properly cleared between tests.

v3.13.0

  • #417: spy now has spy_return_list, which is a list containing all the values returned by the spied function.
  • pytest-mock now requires pytest>=6.2.5.
  • #410: pytest-mock's setup.py file is removed. If you relied on this file, e.g. to install pytest-mock using setup.py install, please see Why you shouldn't invoke setup.py directly for alternatives.
Changelog

Sourced from pytest-mock's changelog.

3.14.0 (2024-03-21)

  • [#415](https://github.com/pytest-dev/pytest-mock/issues/415) <https://github.com/pytest-dev/pytest-mock/pull/415>_: MockType and AsyncMockType can be imported from pytest_mock for type annotation purposes.

  • [#420](https://github.com/pytest-dev/pytest-mock/issues/420) <https://github.com/pytest-dev/pytest-mock/issues/420>_: Fixed a regression which would cause mocker.patch.object to not being properly cleared between tests.

3.13.0 (2024-03-21)

  • [#417](https://github.com/pytest-dev/pytest-mock/issues/417) <https://github.com/pytest-dev/pytest-mock/pull/417>_: spy now has spy_return_list, which is a list containing all the values returned by the spied function.
  • pytest-mock now requires pytest>=6.2.5.
  • [#410](https://github.com/pytest-dev/pytest-mock/issues/410) <https://github.com/pytest-dev/pytest-mock/pull/410>`_: pytest-mock's setup.py file is removed. If you relied on this file, e.g. to install pytest-mock using setup.py install, please see Why you shouldn't invoke setup.py directly <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html#summary> for alternatives.
Commits
  • 8733134 Update CHANGELOG for 3.14.0
  • 5257e3c Refactor MockCache to have a narrow interface
  • 4faf92a Fix regression with mocker.patch not being undone correctly
  • 6bd8712 Drop pre-Python 3.8 support code
  • 366966b Export MockType/AsyncMockType for type annotations (#415)
  • 852116b Merge pull request #418 from pytest-dev/release-3.13.0
  • ef9461b Add instructions on how to start deploy from command-line
  • 5b9d285 Release 3.13.0
  • 6d5d6dc Implement spy_return_list (#417)
  • dc28a0e [pre-commit.ci] pre-commit autoupdate (#416)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-mock&package-manager=pip&previous-version=3.12.0&new-version=3.14.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 38d52814b5a..e8c769d99c4 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -172,7 +172,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun diff --git a/requirements/dev.txt b/requirements/dev.txt index a31efef499e..66ab25a3135 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -163,7 +163,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun diff --git a/requirements/test.txt b/requirements/test.txt index c646c2e6c40..ff88321f859 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -84,7 +84,7 @@ pytest==7.4.4 # pytest-mock pytest-cov==4.1.0 # via -r requirements/test.in -pytest-mock==3.12.0 +pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun From 560c5ba7a07c1eac66fd37248b4507df2e950221 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:28:33 +0000 Subject: [PATCH 100/144] Bump python-on-whales from 0.70.0 to 0.70.1 (#8241) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.70.0 to 0.70.1.
Release notes

Sourced from python-on-whales's releases.

v0.70.1

What's Changed

New Contributors

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.70.0...v0.70.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.70.0&new-version=0.70.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e8c769d99c4..b595363e681 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/dev.txt b/requirements/dev.txt index 66ab25a3135..6216ee4a21d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -167,7 +167,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in pytz==2023.3.post1 # via babel diff --git a/requirements/test.txt b/requirements/test.txt index ff88321f859..fc8bb645447 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -88,7 +88,7 @@ pytest-mock==3.14.0 # via -r requirements/test.in python-dateutil==2.8.2 # via freezegun -python-on-whales==0.70.0 +python-on-whales==0.70.1 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in From 8e91d5c225e89a90d2f736acd893df1ee3db8034 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:43:42 +0000 Subject: [PATCH 101/144] Bump slotscheck from 0.17.3 to 0.18.0 (#8244) Bumps [slotscheck](https://github.com/ariebovenberg/slotscheck) from 0.17.3 to 0.18.0.
Release notes

Sourced from slotscheck's releases.

0.18.0

Improved robustness of importing of namespace packages and built-in modules (#227)

Changelog

Sourced from slotscheck's changelog.

0.18.0 (2024-03-21)

  • Improved robustness of importing of namespace packages and built-in modules (#227)
Commits
  • 8f0d153 prepare next release
  • eb42767 Merge pull request #227 from eltoder/feature/namespace-packages
  • 30f57c0 Fix importing of namespace packages and built-in modules
  • 2c2561c Merge pull request #226 from ariebovenberg/dependabot/pip/black-24.3.0
  • 5da086b Bump black from 24.2.0 to 24.3.0
  • f4fb06f Merge pull request #225 from ariebovenberg/dependabot/pip/pytest-8.1.1
  • 1cca361 Bump pytest from 8.1.0 to 8.1.1
  • 5a667c4 Merge pull request #224 from ariebovenberg/dependabot/pip/mypy-1.9.0
  • bca5db0 Bump mypy from 1.8.0 to 1.9.0
  • 8fece90 Merge pull request #223 from ariebovenberg/dependabot/pip/pytest-8.1.0
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.17.3&new-version=0.18.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b595363e681..d18ad046631 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -197,7 +197,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 6216ee4a21d..a5cd9432e6d 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -186,7 +186,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 37aeb9da0f9..618ecb20c42 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.17.3 +slotscheck==0.18.0 # via -r requirements/lint.in tomli==2.0.1 # via From dd3815165a16554e3edf9233e45c7e3a934605f7 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:27:59 -1000 Subject: [PATCH 102/144] [PR #8252/8f237126 backport][3.10] Fix handling of unsupported upgrades with the pure python http parser (#8255) Co-authored-by: J. 
Nick Koston --- CHANGES/8252.bugfix.rst | 2 ++ aiohttp/http_parser.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8252.bugfix.rst diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst new file mode 100644 index 00000000000..e932eb9c7ed --- /dev/null +++ b/CHANGES/8252.bugfix.rst @@ -0,0 +1,2 @@ +Fixed content not being read when an upgrade request was not supported with the pure Python implementation. +-- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1301f025810..8bd8519ff6b 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -228,6 +228,11 @@ def parse_headers( return (CIMultiDictProxy(headers), tuple(raw_headers)) +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + class HttpParser(abc.ABC, Generic[_MsgT]): lax: ClassVar[bool] = False @@ -354,7 +359,9 @@ def get_content_length() -> Optional[int]: if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) - self._upgraded = msg.upgrade + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) method = getattr(msg, "method", self.method) # code is only present on responses @@ -366,9 +373,8 @@ def get_content_length() -> Optional[int]: method and method_must_be_empty_body(method) ) if not empty_body and ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded ): payload = StreamReader( self.protocol, From 6e8f63cc7fd773276d9226195784f4eb132d5d65 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 09:35:57 -1000 Subject: [PATCH 103/144] [PR #8252/8f237126 backport][3.9] Fix handling of unsupported upgrades with the pure python http parser (#8254) 
Co-authored-by: J. Nick Koston --- CHANGES/8252.bugfix.rst | 2 ++ aiohttp/http_parser.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 CHANGES/8252.bugfix.rst diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst new file mode 100644 index 00000000000..e932eb9c7ed --- /dev/null +++ b/CHANGES/8252.bugfix.rst @@ -0,0 +1,2 @@ +Fixed content not being read when an upgrade request was not supported with the pure Python implementation. +-- by :user:`bdraco`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 1301f025810..8bd8519ff6b 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -228,6 +228,11 @@ def parse_headers( return (CIMultiDictProxy(headers), tuple(raw_headers)) +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + class HttpParser(abc.ABC, Generic[_MsgT]): lax: ClassVar[bool] = False @@ -354,7 +359,9 @@ def get_content_length() -> Optional[int]: if SEC_WEBSOCKET_KEY1 in msg.headers: raise InvalidHeader(SEC_WEBSOCKET_KEY1) - self._upgraded = msg.upgrade + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) method = getattr(msg, "method", self.method) # code is only present on responses @@ -366,9 +373,8 @@ def get_content_length() -> Optional[int]: method and method_must_be_empty_body(method) ) if not empty_body and ( - (length is not None and length > 0) - or msg.chunked - and not msg.upgrade + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded ): payload = StreamReader( self.protocol, From 73dfe9850e7e6419ab764f7f4cac15632085d137 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:20:13 +0000 Subject: [PATCH 104/144] Bump actions/setup-python from 4 to 5 (#7949) MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/setup-python](https://github.com/actions/setup-python) from 4 to 5.
Release notes

Sourced from actions/setup-python's releases.

v5.0.0

What's Changed

In scope of this release, we update node version runtime from node16 to node20 (actions/setup-python#772). Besides, we update dependencies to the latest versions.

Full Changelog: https://github.com/actions/setup-python/compare/v4.8.0...v5.0.0

v4.8.0

What's Changed

In scope of this release we added support for GraalPy (actions/setup-python#694). You can use this snippet to set up GraalPy:

steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
  with:
    python-version: 'graalpy-22.3'
- run: python my_script.py

Besides, the release contains such changes as:

New Contributors

Full Changelog: https://github.com/actions/setup-python/compare/v4...v4.8.0

v4.7.1

What's Changed

Full Changelog: https://github.com/actions/setup-python/compare/v4...v4.7.1

v4.7.0

In scope of this release, the support for reading python version from pyproject.toml was added (actions/setup-python#669).

      - name: Setup Python
        uses: actions/setup-python@v4

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-python&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 153422eb80a..b47f93bb2c8 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -43,7 +43,7 @@ jobs: make sync-direct-runtime-deps git diff --exit-code -- requirements/runtime-deps.in - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Cache PyPI @@ -175,7 +175,7 @@ jobs: submodules: true - name: Setup Python ${{ matrix.pyver }} id: python-install - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: allow-prereleases: true python-version: ${{ matrix.pyver }} @@ -290,7 +290,7 @@ jobs: with: submodules: true - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 - name: Update pip, wheel, setuptools, build, twine run: | python -m pip install -U pip wheel setuptools build twine @@ -351,7 +351,7 @@ jobs: fi shell: bash - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x - name: Update pip, wheel, setuptools, build, twine From b18f7b0a0e2852708446bb8b5a1d80e8cc24450a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:20:20 +0000 Subject: [PATCH 105/144] Bump pip-tools from 7.4.0 to 7.4.1 (#8210) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pip-tools](https://github.com/jazzband/pip-tools) from 7.4.0 to 7.4.1.
Release notes

Sourced from pip-tools's releases.

7.4.1

Bug Fixes:

Changelog

Sourced from pip-tools's changelog.

v7.4.1

05 Mar 2024

Bug Fixes:

Commits
  • 60ebdf5 Merge pull request #2067 from atugushev/release-7.4.1
  • c671ea0 Strip emoji from changelog line
  • f825385 Release 7.4.1
  • 1197151 Merge pull request #2038 from honnix/patch-1
  • 1f00154 Merge pull request #2061 from chrysle/pip-compile-docs-changedir
  • d99493c Skip constraint path check
  • 35b06db Change directory in pip-compile-docs tox session
  • a8beb7a Merge pull request #1981 from dragly/fix-all-extras
  • 7caff1a Merge branch 'main' into fix-all-extras
  • e0afb79 Merge pull request #2057 from jazzband/pre-commit-ci-update-config
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip-tools&package-manager=pip&previous-version=7.4.0&new-version=7.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d18ad046631..c2d84b3c07c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -132,7 +132,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.4.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==2.4.0 # via virtualenv diff --git a/requirements/dev.txt b/requirements/dev.txt index a5cd9432e6d..4658947d038 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -127,7 +127,7 @@ pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pip-tools==7.4.0 +pip-tools==7.4.1 # via -r requirements/dev.in platformdirs==3.10.0 # via virtualenv From 0689e6d5fc6f31ee24f242292b0599076f295a5f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:21:29 +0000 Subject: [PATCH 106/144] Bump pytest-cov from 4.1.0 to 5.0.0 (#8246) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-cov](https://github.com/pytest-dev/pytest-cov) from 4.1.0 to 5.0.0.
Changelog

Sourced from pytest-cov's changelog.

5.0.0 (2024-03-24)

  • Removed support for xdist rsync (now deprecated). Contributed by Matthias Reichenbach in [#623](https://github.com/pytest-dev/pytest-cov/issues/623) <https://github.com/pytest-dev/pytest-cov/pull/623>_.
  • Switched docs theme to Furo.
  • Various legacy Python cleanup and CI improvements. Contributed by Christian Clauss and Hugo van Kemenade in [#630](https://github.com/pytest-dev/pytest-cov/issues/630) <https://github.com/pytest-dev/pytest-cov/pull/630>, [#631](https://github.com/pytest-dev/pytest-cov/issues/631) <https://github.com/pytest-dev/pytest-cov/pull/631>, [#632](https://github.com/pytest-dev/pytest-cov/issues/632) <https://github.com/pytest-dev/pytest-cov/pull/632>_ and [#633](https://github.com/pytest-dev/pytest-cov/issues/633) <https://github.com/pytest-dev/pytest-cov/pull/633>_.
  • Added a pyproject.toml example in the docs. Contributed by Dawn James in [#626](https://github.com/pytest-dev/pytest-cov/issues/626) <https://github.com/pytest-dev/pytest-cov/pull/626>_.
  • Modernized project's pre-commit hooks to use ruff. Initial POC contributed by Christian Clauss in [#584](https://github.com/pytest-dev/pytest-cov/issues/584) <https://github.com/pytest-dev/pytest-cov/pull/584>_.
Commits
  • 5295ce0 Bump version: 4.1.0 → 5.0.0
  • 1181b06 Update changelog.
  • 9757222 Fix a minor grammar error (#636)
  • 9f5cd81 Cleanup releasing instructions. Closes #616.
  • 93b5047 Add test for pyproject.toml loading without explicit --cov-config. Ref #508.
  • ff50860 docs: add config instructions for pyproject.toml.
  • 4a5a4b5 Keep GitHub Actions up to date with GitHub's Dependabot
  • 1d7f559 Fix or remove URLs that are causing docs tests to fail
  • 6a5af8e Update changelog.
  • d9fe8df Switch to furo. Closes #618.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-cov&package-manager=pip&previous-version=4.1.0&new-version=5.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c2d84b3c07c..921d847d019 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -170,7 +170,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 4658947d038..b170e76688e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -161,7 +161,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in diff --git a/requirements/test.txt b/requirements/test.txt index fc8bb645447..6f24fe2b1fa 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -82,7 +82,7 @@ pytest==7.4.4 # -r requirements/test.in # pytest-cov # pytest-mock -pytest-cov==4.1.0 +pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in From 8c39b439573a39c46556fdd2872794f7a6ac2d41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 21:23:09 +0000 Subject: [PATCH 107/144] Bump slotscheck from 0.18.0 to 0.19.0 (#8249) Bumps [slotscheck](https://github.com/ariebovenberg/slotscheck) from 0.18.0 to 0.19.0.
Release notes

Sourced from slotscheck's releases.

0.19.0

Improved support for implicit/native namespace packages (#228, #230)

Changelog

Sourced from slotscheck's changelog.

0.19.0 (2024-03-25)

  • Improved support for implicit/native namespace packages (#228, #230)
Commits
  • e784c87 prepare next release
  • f5b1782 Merge pull request #231 from ariebovenberg/dependabot/pip/pytest-cov-5.0.0
  • 1d47c42 Bump pytest-cov from 4.1.0 to 5.0.0
  • 3884ca7 Merge pull request #230 from eltoder/feature/packages-multiple-paths
  • 462dc95 Support packages that span multiple directories
  • bd1b80c Merge pull request #228 from eltoder/feature/discover-namespace-packages
  • 9f60d08 Amend docs about subdirectory module discovery
  • 908f6d4 Address review comments
  • 8626cc2 improve error message on module not found on path
  • 9d6ae69 Update documentation
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=slotscheck&package-manager=pip&previous-version=0.18.0&new-version=0.19.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 921d847d019..fa5adf12e1a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -197,7 +197,7 @@ six==1.16.0 # via # python-dateutil # virtualenv -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index b170e76688e..7df659854e5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -186,7 +186,7 @@ setuptools-git==1.2 # via -r requirements/test.in six==1.16.0 # via python-dateutil -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in snowballstemmer==2.2.0 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 618ecb20c42..59da6563db3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -40,7 +40,7 @@ pytest==7.4.4 # via -r requirements/lint.in pyyaml==6.0.1 # via pre-commit -slotscheck==0.18.0 +slotscheck==0.19.0 # via -r requirements/lint.in tomli==2.0.1 # via From 58dec1d0477c2d807e3eb0f42c29340c8fcd9e09 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 28 Mar 2024 12:14:22 -1000 Subject: [PATCH 108/144] Ensure websocket transport is closed when client does not close it (#8200) (#8256) --- CHANGES/8200.bugfix.rst | 6 ++ aiohttp/web_ws.py | 21 ++++-- tests/test_web_websocket.py | 89 +++++++++++++++++++++++++- tests/test_web_websocket_functional.py | 33 +++++++++- 4 files changed, 141 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8200.bugfix.rst diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst new file mode 100644 index 00000000000..e4492a8a84c --- /dev/null +++ b/CHANGES/8200.bugfix.rst @@ -0,0 +1,6 @@ +Ensure websocket transport is closed when client does not close it +-- by :user:`bdraco`. + +The transport could remain open if the client did not close it. This +change ensures the transport is closed when the client does not close +it. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index d20a26ca470..52604d8a1eb 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -395,6 +395,7 @@ async def close( return True if self._closing: + self._close_transport() return True reader = self._reader @@ -418,9 +419,18 @@ async def close( self._exception = asyncio.TimeoutError() return True + def _set_closing(self, code: WSCloseCode) -> None: + """Set the close code and mark the connection as closing.""" + self._closing = True + self._close_code = code + def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" self._close_code = code + self._close_transport() + + def _close_transport(self) -> None: + """Close the transport.""" if self._req is not None and self._req.transport is not None: self._req.transport.close() @@ -465,14 +475,12 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) 
await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data + self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: # The client is likely going to close the @@ -481,7 +489,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # likely result writing to a broken pipe. await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: - self._closing = True + self._set_closing(WSCloseCode.OK) elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue @@ -525,5 +533,8 @@ async def __anext__(self) -> WSMessage: return msg def _cancel(self, exc: BaseException) -> None: + # web_protocol calls this from connection_lost + # or when the server is shutting down. + self._closing = True if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index d0aca0c019a..543fe91db07 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -371,7 +371,94 @@ async def test_receive_eofstream_in_reader(make_request, loop) -> None: assert ws.closed -async def test_receive_timeouterror(make_request, loop) -> None: +async def test_receive_exception_in_reader(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + + ws._reader = mock.Mock() + exc = Exception() + res = loop.create_future() + res.set_exception(exc) + ws._reader.read = make_mocked_coro(res) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.ERROR + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_close_but_left_open(make_request: Any, loop: Any) -> None: + req 
= make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + close_message = WSMessage(WSMsgType.CLOSE, 1000, "close") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=close_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + read_mock = mock.AsyncMock(return_value=closing_message) + ws._reader.read = read_mock + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + ws._cancel(ConnectionResetError("Connection lost")) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + +async def test_close_after_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=closing_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + assert len(ws._req.transport.close.mock_calls) == 0 + + await ws.close() + assert 
ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index da855a4b7c1..7af4c6091eb 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,7 @@ # HTTP websocket server functional tests import asyncio +from typing import Any, Optional import pytest @@ -258,7 +259,7 @@ async def handler(request): assert "reply" == (await ws.receive_str()) # The server closes here. Then the client sends bogus messages with an - # internval shorter than server-side close timeout, to make the server + # interval shorter than server-side close timeout, to make the server # hanging indefinitely. await asyncio.sleep(0.08) msg = await ws._reader.read() @@ -310,8 +311,36 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED -async def test_auto_pong_with_closing_by_peer(loop, aiohttp_client) -> None: +async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: + srv_ws: Optional[web.WebSocketResponse] = None + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + await asyncio.sleep(0) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws: web.WebSocketResponse = await client.ws_connect("/", protocols=("eggs", "bar")) + + await ws._writer._send_frame(b"", WSMsgType.CLOSE) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + +async def test_auto_pong_with_closing_by_peer(loop: Any, aiohttp_client: Any) -> None: closed = 
loop.create_future() async def handler(request): From 1746d2d24eb9d748d81d71e59d8b282b335d9dc8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 22:54:44 +0000 Subject: [PATCH 109/144] [PR #8259/f78ef6f8 backport][3.9] Flag python 3.11/12 support (#8261) **This is a backport of PR #8259 as merged into master (f78ef6f8e9e3a9503f3a17357cb4aad63c0df43d).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 24 ++---------------------- setup.cfg | 2 ++ 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index b47f93bb2c8..36f7b9a137f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10'] + pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -141,28 +141,8 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: macos - pyver: "3.11" - experimental: true - no-extensions: 'Y' - os: ubuntu - pyver: "3.11" - experimental: false - no-extensions: 'Y' - - os: windows - pyver: "3.11" - experimental: true - no-extensions: 'Y' - - os: ubuntu - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: macos - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: windows - pyver: "3.12" + pyver: "3.13" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/setup.cfg b/setup.cfg index 83da3961014..c514bab9f94 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,6 +36,8 @@ classifiers = Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Topic :: Internet :: WWW/HTTP From 259293f8c392b4ca276360d5434627f7c8fd3e34 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 28 Mar 2024 13:16:31 -1000 Subject: [PATCH 110/144] Ensure websocket transport is closed when client does not close it (#8200) (#8257) --- CHANGES/8200.bugfix.rst | 6 ++ aiohttp/web_ws.py | 21 ++++-- tests/test_web_websocket.py | 89 +++++++++++++++++++++++++- tests/test_web_websocket_functional.py | 33 +++++++++- 4 files changed, 141 insertions(+), 8 deletions(-) create mode 100644 CHANGES/8200.bugfix.rst diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst new file mode 100644 index 00000000000..e4492a8a84c --- /dev/null +++ b/CHANGES/8200.bugfix.rst @@ -0,0 +1,6 @@ +Ensure websocket transport is closed when client does not close it +-- by :user:`bdraco`. + +The transport could remain open if the client did not close it. This +change ensures the transport is closed when the client does not close +it. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index d20a26ca470..52604d8a1eb 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -395,6 +395,7 @@ async def close( return True if self._closing: + self._close_transport() return True reader = self._reader @@ -418,9 +419,18 @@ async def close( self._exception = asyncio.TimeoutError() return True + def _set_closing(self, code: WSCloseCode) -> None: + """Set the close code and mark the connection as closing.""" + self._closing = True + self._close_code = code + def _set_code_close_transport(self, code: WSCloseCode) -> None: """Set the close code and close the transport.""" self._close_code = code + self._close_transport() + + def _close_transport(self) -> None: + """Close the transport.""" if self._req is not None and self._req.transport is not None: self._req.transport.close() @@ -465,14 +475,12 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return WSMessage(WSMsgType.ERROR, exc, None) except Exception as exc: self._exception = exc - self._closing = True - self._close_code = WSCloseCode.ABNORMAL_CLOSURE + self._set_closing(WSCloseCode.ABNORMAL_CLOSURE) 
await self.close() return WSMessage(WSMsgType.ERROR, exc, None) if msg.type == WSMsgType.CLOSE: - self._closing = True - self._close_code = msg.data + self._set_closing(msg.data) # Could be closed while awaiting reader. if not self._closed and self._autoclose: # The client is likely going to close the @@ -481,7 +489,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: # likely result writing to a broken pipe. await self.close(drain=False) elif msg.type == WSMsgType.CLOSING: - self._closing = True + self._set_closing(WSCloseCode.OK) elif msg.type == WSMsgType.PING and self._autoping: await self.pong(msg.data) continue @@ -525,5 +533,8 @@ async def __anext__(self) -> WSMessage: return msg def _cancel(self, exc: BaseException) -> None: + # web_protocol calls this from connection_lost + # or when the server is shutting down. + self._closing = True if self._reader is not None: set_exception(self._reader, exc) diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index d0aca0c019a..543fe91db07 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -371,7 +371,94 @@ async def test_receive_eofstream_in_reader(make_request, loop) -> None: assert ws.closed -async def test_receive_timeouterror(make_request, loop) -> None: +async def test_receive_exception_in_reader(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + + ws._reader = mock.Mock() + exc = Exception() + res = loop.create_future() + res.set_exception(exc) + ws._reader.read = make_mocked_coro(res) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.ERROR + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_close_but_left_open(make_request: Any, loop: Any) -> None: + req 
= make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + close_message = WSMessage(WSMsgType.CLOSE, 1000, "close") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=close_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + read_mock = mock.AsyncMock(return_value=closing_message) + ws._reader.read = read_mock + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + + ws._cancel(ConnectionResetError("Connection lost")) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + + +async def test_close_after_closing(make_request: Any, loop: Any) -> None: + req = make_request("GET", "/") + ws = WebSocketResponse() + await ws.prepare(req) + closing_message = WSMessage(WSMsgType.CLOSING, 1000, "closing") + + ws._reader = mock.Mock() + ws._reader.read = mock.AsyncMock(return_value=closing_message) + ws._payload_writer.drain = mock.Mock() + ws._payload_writer.drain.return_value = loop.create_future() + ws._payload_writer.drain.return_value.set_result(True) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSING + assert not ws.closed + assert len(ws._req.transport.close.mock_calls) == 0 + + await ws.close() + assert 
ws.closed + assert len(ws._req.transport.close.mock_calls) == 1 + + +async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: req = make_request("GET", "/") ws = WebSocketResponse() await ws.prepare(req) diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index da855a4b7c1..7af4c6091eb 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,7 @@ # HTTP websocket server functional tests import asyncio +from typing import Any, Optional import pytest @@ -258,7 +259,7 @@ async def handler(request): assert "reply" == (await ws.receive_str()) # The server closes here. Then the client sends bogus messages with an - # internval shorter than server-side close timeout, to make the server + # interval shorter than server-side close timeout, to make the server # hanging indefinitely. await asyncio.sleep(0.08) msg = await ws._reader.read() @@ -310,8 +311,36 @@ async def handler(request): assert msg.type == WSMsgType.CLOSED -async def test_auto_pong_with_closing_by_peer(loop, aiohttp_client) -> None: +async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: + srv_ws: Optional[web.WebSocketResponse] = None + async def handler(request): + nonlocal srv_ws + ws = srv_ws = web.WebSocketResponse(protocols=("foo", "bar")) + await ws.prepare(request) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + await asyncio.sleep(0) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws: web.WebSocketResponse = await client.ws_connect("/", protocols=("eggs", "bar")) + + await ws._writer._send_frame(b"", WSMsgType.CLOSE) + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + + await asyncio.sleep(0) + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSED + + +async def test_auto_pong_with_closing_by_peer(loop: Any, aiohttp_client: Any) -> None: closed = 
loop.create_future() async def handler(request): From 039da6f66fb6681ab2931770352f1dec661db02b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 23:58:06 +0000 Subject: [PATCH 111/144] Bump actions/cache from 4.0.1 to 4.0.2 (#8236) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.0.1 to 4.0.2.
Release notes

Sourced from actions/cache's releases.

v4.0.2

What's Changed

Full Changelog: https://github.com/actions/cache/compare/v4.0.1...v4.0.2

Changelog

Sourced from actions/cache's changelog.

4.0.2

  • Fixed restore fail-on-cache-miss not working.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.0.1&new-version=4.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston --- .github/workflows/ci-cd.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 36f7b9a137f..0b9c1dbcb96 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -47,7 +47,7 @@ jobs: with: python-version: 3.9 - name: Cache PyPI - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 with: key: pip-lint-${{ hashFiles('requirements/*.txt') }} path: ~/.cache/pip @@ -99,7 +99,7 @@ jobs: with: submodules: true - name: Cache llhttp generated files - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 id: cache with: key: llhttp-${{ hashFiles('vendor/llhttp/package*.json', 'vendor/llhttp/src/**/*') }} @@ -164,7 +164,7 @@ jobs: run: | echo "::set-output name=dir::$(pip cache dir)" # - name: Cache - name: Cache PyPI - uses: actions/cache@v4.0.1 + uses: actions/cache@v4.0.2 with: key: pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-${{ hashFiles('requirements/*.txt') }} path: ${{ steps.pip-cache.outputs.dir }} From 5c248fcc8c6d39c01ebec60418c8a0ae500ce094 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 23:39:49 +0000 Subject: [PATCH 112/144] [PR #8260/2434bfe8 backport][3.9] Adjust import timings test for python 3.12 (#8265) Co-authored-by: J. 
Nick Koston --- tests/test_imports.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index b98a2763458..7d0869d46c4 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,6 +28,11 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_TARGET_TIMINGS_BY_PYTHON_VERSION = { + "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs +} + + @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", @@ -59,4 +64,7 @@ def test_import_time(pytester: pytest.Pytester) -> None: else: os.environ["PYTHONPATH"] = old_path - assert best_time_ms < 200 + expected_time = _TARGET_TIMINGS_BY_PYTHON_VERSION.get( + f"{sys.version_info.major}.{sys.version_info.minor}", 200 + ) + assert best_time_ms < expected_time From 286f50ca69b69d2f0377fbb68482ad9791beb42e Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 23:46:41 +0000 Subject: [PATCH 113/144] [PR #8260/2434bfe8 backport][3.10] Adjust import timings test for python 3.12 (#8266) Co-authored-by: J. 
Nick Koston --- tests/test_imports.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_imports.py b/tests/test_imports.py index b98a2763458..7d0869d46c4 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -28,6 +28,11 @@ def test_web___all__(pytester: pytest.Pytester) -> None: result.assert_outcomes(passed=0, errors=0) +_TARGET_TIMINGS_BY_PYTHON_VERSION = { + "3.12": 250, # 3.12 is expected to be a bit slower due to performance trade-offs +} + + @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", @@ -59,4 +64,7 @@ def test_import_time(pytester: pytest.Pytester) -> None: else: os.environ["PYTHONPATH"] = old_path - assert best_time_ms < 200 + expected_time = _TARGET_TIMINGS_BY_PYTHON_VERSION.get( + f"{sys.version_info.major}.{sys.version_info.minor}", 200 + ) + assert best_time_ms < expected_time From 0b98984216ca4dd25e5db428c49b1a9a859827cf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 00:29:48 +0000 Subject: [PATCH 114/144] [PR #8259/f78ef6f8 backport][3.10] Flag python 3.11/12 support (#8262) **This is a backport of PR #8259 as merged into master (f78ef6f8e9e3a9503f3a17357cb4aad63c0df43d).** Co-authored-by: Sam Bull --- .github/workflows/ci-cd.yml | 24 ++---------------------- setup.cfg | 2 ++ 2 files changed, 4 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index d78c4b56304..cb0ab872df6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,7 +125,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10'] + pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -141,28 +141,8 @@ jobs: no-extensions: 'Y' os: ubuntu experimental: false - - os: macos - pyver: "3.11" - experimental: 
true - no-extensions: 'Y' - os: ubuntu - pyver: "3.11" - experimental: false - no-extensions: 'Y' - - os: windows - pyver: "3.11" - experimental: true - no-extensions: 'Y' - - os: ubuntu - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: macos - pyver: "3.12" - experimental: true - no-extensions: 'Y' - - os: windows - pyver: "3.12" + pyver: "3.13" experimental: true no-extensions: 'Y' fail-fast: true diff --git a/setup.cfg b/setup.cfg index c291057ec7a..f407fbf901d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,6 +36,8 @@ classifiers = Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Topic :: Internet :: WWW/HTTP From 111f3702118c39cafe54b9fe74bcd47dcd471a5a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 00:47:37 +0000 Subject: [PATCH 115/144] [PR #8251/c21b76d0 backport][3.10] Leave websocket transport open if receive times out or is cancelled (#8264) Co-authored-by: J. Nick Koston --- CHANGES/8251.bugfix.rst | 4 ++ aiohttp/web_ws.py | 3 +- tests/test_web_websocket.py | 3 +- tests/test_web_websocket_functional.py | 93 ++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8251.bugfix.rst diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst new file mode 100644 index 00000000000..6fc6507cfe2 --- /dev/null +++ b/CHANGES/8251.bugfix.rst @@ -0,0 +1,4 @@ +Leave websocket transport open if receive times out or is cancelled +-- by :user:`bdraco`. + +This restores the behavior prior to the change in #7978. 
diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 52604d8a1eb..9fe66527539 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -462,8 +462,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: waiter = self._waiting set_result(waiter, True) self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + except asyncio.TimeoutError: raise except EofStream: self._close_code = WSCloseCode.OK diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 543fe91db07..d9eeda3d1d2 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -472,7 +472,8 @@ async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() - assert len(ws._req.transport.close.mock_calls) == 1 + # Should not close the connection on timeout + assert len(ws._req.transport.close.mock_calls) == 0 async def test_multiple_receive_on_close_connection(make_request) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 7af4c6091eb..b471b131c1e 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -1,6 +1,8 @@ # HTTP websocket server functional tests import asyncio +import contextlib +import sys from typing import Any, Optional import pytest @@ -797,3 +799,94 @@ async def ws_handler(request): resp = await client.get("/api/null", timeout=1) assert (await resp.json()) == {"err": None} resp.close() + + +async def test_receive_being_cancelled_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive()) + await asyncio.sleep(0) + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + 
+ msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await asyncio.sleep(0) + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed + + +async def test_receive_timeout_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + timed_out = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive(sys.float_info.min)) + with contextlib.suppress(asyncio.TimeoutError): + await task + + timed_out.set_result(None) + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await timed_out + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed From eaddd9c7a36600d504e28f5d9d6e35cd683a06d8 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:08:15 +0000 Subject: [PATCH 116/144] [PR #8251/c21b76d0 
backport][3.9] Leave websocket transport open if receive times out or is cancelled (#8263) Co-authored-by: J. Nick Koston --- CHANGES/8251.bugfix.rst | 4 ++ aiohttp/web_ws.py | 3 +- tests/test_web_websocket.py | 3 +- tests/test_web_websocket_functional.py | 93 ++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 3 deletions(-) create mode 100644 CHANGES/8251.bugfix.rst diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst new file mode 100644 index 00000000000..6fc6507cfe2 --- /dev/null +++ b/CHANGES/8251.bugfix.rst @@ -0,0 +1,4 @@ +Leave websocket transport open if receive times out or is cancelled +-- by :user:`bdraco`. + +This restores the behavior prior to the change in #7978. diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 52604d8a1eb..9fe66527539 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -462,8 +462,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: waiter = self._waiting set_result(waiter, True) self._waiting = None - except (asyncio.CancelledError, asyncio.TimeoutError): - self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE) + except asyncio.TimeoutError: raise except EofStream: self._close_code = WSCloseCode.OK diff --git a/tests/test_web_websocket.py b/tests/test_web_websocket.py index 543fe91db07..d9eeda3d1d2 100644 --- a/tests/test_web_websocket.py +++ b/tests/test_web_websocket.py @@ -472,7 +472,8 @@ async def test_receive_timeouterror(make_request: Any, loop: Any) -> None: with pytest.raises(asyncio.TimeoutError): await ws.receive() - assert len(ws._req.transport.close.mock_calls) == 1 + # Should not close the connection on timeout + assert len(ws._req.transport.close.mock_calls) == 0 async def test_multiple_receive_on_close_connection(make_request) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 7af4c6091eb..b471b131c1e 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py 
@@ -1,6 +1,8 @@ # HTTP websocket server functional tests import asyncio +import contextlib +import sys from typing import Any, Optional import pytest @@ -797,3 +799,94 @@ async def ws_handler(request): resp = await client.get("/api/null", timeout=1) assert (await resp.json()) == {"err": None} resp.close() + + +async def test_receive_being_cancelled_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive()) + await asyncio.sleep(0) + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await asyncio.sleep(0) + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed + + +async def test_receive_timeout_keeps_connection_open( + loop: Any, aiohttp_client: Any +) -> None: + closed = loop.create_future() + timed_out = loop.create_future() + + async def handler(request): + ws = web.WebSocketResponse(autoping=False) + await ws.prepare(request) + + task = asyncio.create_task(ws.receive(sys.float_info.min)) + with contextlib.suppress(asyncio.TimeoutError): + await task + + timed_out.set_result(None) + + msg = await ws.receive() + assert msg.type == WSMsgType.PING + await asyncio.sleep(0) + await ws.pong("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.CLOSE + assert 
msg.data == WSCloseCode.OK + assert msg.extra == "exit message" + closed.set_result(None) + return ws + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + ws = await client.ws_connect("/", autoping=False) + + await timed_out + await ws.ping("data") + + msg = await ws.receive() + assert msg.type == WSMsgType.PONG + assert msg.data == b"data" + + await ws.close(code=WSCloseCode.OK, message="exit message") + + await closed From 3a13bd56a1d027a662390c9ee5fb4b8c006751de Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:51:48 +0000 Subject: [PATCH 117/144] [PR #8267/509fb269 backport][3.9] Add note about tasks that should be cancelled on shutdown (#8268) **This is a backport of PR #8267 as merged into master (509fb269ac7ed0fe9ad387eccf7d1112aa9c0e65).** Co-authored-by: Sam Bull --- CHANGES/8267.doc.rst | 1 + docs/web_advanced.rst | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 CHANGES/8267.doc.rst diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst new file mode 100644 index 00000000000..69f11d37560 --- /dev/null +++ b/CHANGES/8267.doc.rst @@ -0,0 +1 @@ +Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 3549a5c7e36..d2ba3013e30 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -952,6 +952,14 @@ steps 4 and 7). :ref:`cleanup contexts`. 7. Cancel any remaining tasks and wait on them to complete. +.. note:: + + When creating new tasks in a handler which _should_ be cancelled on server shutdown, + then it is important to keep track of those tasks and explicitly cancel them in a + :attr:`Application.on_shutdown` callback. As we can see from the above steps, + without this the server will wait on those new tasks to complete before it continues + with server shutdown. 
+ Websocket shutdown ^^^^^^^^^^^^^^^^^^ From d10bd0a8742a3824f030236ef34f05ea84cf2496 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 30 Mar 2024 01:52:10 +0000 Subject: [PATCH 118/144] [PR #8267/509fb269 backport][3.10] Add note about tasks that should be cancelled on shutdown (#8269) **This is a backport of PR #8267 as merged into master (509fb269ac7ed0fe9ad387eccf7d1112aa9c0e65).** Co-authored-by: Sam Bull --- CHANGES/8267.doc.rst | 1 + docs/web_advanced.rst | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 CHANGES/8267.doc.rst diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst new file mode 100644 index 00000000000..69f11d37560 --- /dev/null +++ b/CHANGES/8267.doc.rst @@ -0,0 +1 @@ +Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index 3549a5c7e36..d2ba3013e30 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -952,6 +952,14 @@ steps 4 and 7). :ref:`cleanup contexts`. 7. Cancel any remaining tasks and wait on them to complete. +.. note:: + + When creating new tasks in a handler which _should_ be cancelled on server shutdown, + then it is important to keep track of those tasks and explicitly cancel them in a + :attr:`Application.on_shutdown` callback. As we can see from the above steps, + without this the server will wait on those new tasks to complete before it continues + with server shutdown. 
+ Websocket shutdown ^^^^^^^^^^^^^^^^^^ From 46e8f64b2155971ed9f35c25f22fbfe8d90bfb90 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 31 Mar 2024 02:41:31 +0100 Subject: [PATCH 119/144] [PR #8271/e0d9d3ab backport][3.9] Fix race condition with request handlers on shutdown (#8272) **This is a backport of PR #8271 as merged into master (e0d9d3ab9417cea6d25a37a75d12d3a9c1755a8b).** Co-authored-by: Sam Bull --- CHANGES/8271.bugfix.rst | 1 + aiohttp/web_runner.py | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/8271.bugfix.rst diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst new file mode 100644 index 00000000000..9d572ba2fe6 --- /dev/null +++ b/CHANGES/8271.bugfix.rst @@ -0,0 +1 @@ +Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 6999b5c5feb..19a4441658f 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -307,6 +307,9 @@ async def cleanup(self) -> None: await site.stop() if self._server: # If setup succeeded + # Yield to event loop to ensure incoming requests prior to stopping the sites + # have all started to be handled before we proceed to close idle connections. 
+ await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() From f74026ab443f3fc48a296e98801f7bce3b37c308 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 31 Mar 2024 02:41:56 +0100 Subject: [PATCH 120/144] [PR #8271/e0d9d3ab backport][3.10] Fix race condition with request handlers on shutdown (#8273) **This is a backport of PR #8271 as merged into master (e0d9d3ab9417cea6d25a37a75d12d3a9c1755a8b).** Co-authored-by: Sam Bull --- CHANGES/8271.bugfix.rst | 1 + aiohttp/web_runner.py | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 CHANGES/8271.bugfix.rst diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst new file mode 100644 index 00000000000..9d572ba2fe6 --- /dev/null +++ b/CHANGES/8271.bugfix.rst @@ -0,0 +1 @@ +Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 6999b5c5feb..19a4441658f 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -307,6 +307,9 @@ async def cleanup(self) -> None: await site.stop() if self._server: # If setup succeeded + # Yield to event loop to ensure incoming requests prior to stopping the sites + # have all started to be handled before we proceed to close idle connections. + await asyncio.sleep(0) self._server.pre_shutdown() await self.shutdown() From f91cb32a570f65543c05fb2c17e616b520899646 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 10:37:10 +0000 Subject: [PATCH 121/144] Bump cython from 3.0.9 to 3.0.10 (#8276) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [cython](https://github.com/cython/cython) from 3.0.9 to 3.0.10.
Changelog

Sourced from cython's changelog.

3.0.10 (2024-03-30)

Bugs fixed

  • Cython generated incorrect self-casts when directly calling final methods of subtypes. Patch by Lisandro Dalcin. (Github issue :issue:2747)

  • Internal C names generated from C function signatures could become too long for MSVC. (Github issue :issue:6052)

  • The noexcept warnings could be misleading in some cases. Patch by Gonzalo Tornaría. (Github issue :issue:6087)

  • The @cython.ufunc implementation could generate incomplete C code. (Github issue :issue:6064)

  • The libcpp.complex declarations could result in incorrect C++ code. Patch by Raffi Enficiaud. (Github issue :issue:6037)

  • Several tests were adapted to work with both NumPy 1.x and 2.0. Patch by Matti Picus. (Github issues :issue:6076, :issue:6100)

  • C compiler warnings when the freelist implementation is disabled (e.g. on PyPy) were fixed. It can now be disabled explicitly with the C macro guard CYTHON_USE_FREELISTS=0. (Github issue :issue:6099)

  • Some C macro guards for feature flags were missing from the NOGIL Python configuration.

  • Some recently added builtins that weren't available on all Python versions were unconditionally looked up at module import time (if used by user code) and could thus fail the import.

  • A performance hint regarding exported pxd declarations was improved. (Github issue :issue:6001)

Commits
  • 7ae8531 Build: Upgrade action version.
  • 71c5775 Fix Py3-ism in Py2/3 code.
  • 6cab0d6 Prepare release of 3.0.10.
  • 4e842a9 Remove a useless 'f' string prefix.
  • 8aba690 Improve performance hints for nogil + pxd (#6088)
  • ae120d5 Disable pstats tests also in 3.13 as long as they wait for a new C-API in CPy...
  • 27364ef Update changelog.
  • e178fc2 Reduce cap length of cnames generated from long function argument lists (GH-...
  • 03e8393 Update changelog.
  • 5fc42b1 Fix self cast when calling final methods (GH-6085)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.0.9&new-version=3.0.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index fa5adf12e1a..4c46b1e5745 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -62,7 +62,7 @@ cryptography==41.0.2 # via # pyjwt # trustme -cython==3.0.9 +cython==3.0.10 # via -r requirements/cython.in distlib==0.3.3 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index f7fecfb3b95..63bcc1ac3b5 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.0.9 +cython==3.0.10 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in From 836f4a743b3230567aebe69dfb1e8112e60acfc1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 10:47:12 +0000 Subject: [PATCH 122/144] Bump aiodns from 3.1.1 to 3.2.0 (#8277) Bumps [aiodns](https://github.com/saghul/aiodns) from 3.1.1 to 3.2.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=aiodns&package-manager=pip&previous-version=3.1.1&new-version=3.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 6bc448debd4..977f97dd5da 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/base.txt --strip-extras requirements/base.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 4c46b1e5745..f13a0f83ef3 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/constraints.txt --resolver=backtracking --strip-extras requirements/constraints.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 7df659854e5..6ea71482ea4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/dev.txt --resolver=backtracking --strip-extras requirements/dev.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/runtime-deps.txt 
b/requirements/runtime-deps.txt index 4a968058d61..d1efee2aecf 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index 6f24fe2b1fa..19edd509007 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/test.txt --resolver=backtracking --strip-extras requirements/test.in # -aiodns==3.1.1 ; sys_platform == "linux" or sys_platform == "darwin" +aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in aiohappyeyeballs==2.3.2 # via -r requirements/runtime-deps.in From 3f79241bcbc02c9850348fc04c064fcbc980e8f0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 18:59:05 +0100 Subject: [PATCH 123/144] [PR #8286/28f1fd88 backport][3.9] docs: remove repetitive word in comment (#8287) **This is a backport of PR #8286 as merged into master (28f1fd88cbe44d81487d5e9b23a3302d032811bd).** ## What do these changes do? ## Are there changes in behavior for the user? ## Is it a substantial burden for the maintainers to support this? ## Related issue number ## Checklist - [x] I think the code is well written - [ ] Unit tests for the changes exist - [ ] Documentation reflects the changes - [ ] If you provide code modification, please add yourself to `CONTRIBUTORS.txt` * The format is <Name> <Surname>. * Please keep alphabetical order, the file is sorted by names. 
- [ ] Add a new news fragment into the `CHANGES/` folder * name it `..rst` (e.g. `588.bugfix.rst`) * if you don't have an issue number, change it to the pull request number after creating the PR * `.bugfix`: A bug fix for something the maintainers deemed an improper undesired behavior that got corrected to match pre-agreed expectations. * `.feature`: A new behavior, public APIs. That sort of stuff. * `.deprecation`: A declaration of future API removals and breaking changes in behavior. * `.breaking`: When something public is removed in a breaking way. Could be deprecated in an earlier release. * `.doc`: Notable updates to the documentation structure or build process. * `.packaging`: Notes for downstreams about unobvious side effects and tooling. Changes in the test invocation considerations and runtime assumptions. * `.contrib`: Stuff that affects the contributor experience. e.g. Running tests, building the docs, setting up the development environment. * `.misc`: Changes that are hard to assign to any of the above categories. * Make sure to use full sentences with correct case and punctuation, for example: ```rst Fixed issue with non-ascii contents in doctest text files -- by :user:`contributor-gh-handle`. ``` Use the past tense or the present tense a non-imperative mood, referring to what's changed compared to the last released version of this project. Co-authored-by: crazehang <165746307+crazehang@users.noreply.github.com> --- docs/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing.rst b/docs/testing.rst index 15f163107f2..027ba63a039 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -775,7 +775,7 @@ Utilities mocked.assert_called_with(1, 2) - :param return_value: A value that the the mock object will return when + :param return_value: A value that the mock object will return when called. :returns: A mock object that behaves as a coroutine which returns *return_value* when called. 
From 6643115c6978f052abc293085b633e47d35061a0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 4 Apr 2024 19:25:02 +0100 Subject: [PATCH 124/144] [PR #8286/28f1fd88 backport][3.10] docs: remove repetitive word in comment (#8288) **This is a backport of PR #8286 as merged into master (28f1fd88cbe44d81487d5e9b23a3302d032811bd).** Co-authored-by: crazehang <165746307+crazehang@users.noreply.github.com> --- docs/testing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/testing.rst b/docs/testing.rst index 15f163107f2..027ba63a039 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -775,7 +775,7 @@ Utilities mocked.assert_called_with(1, 2) - :param return_value: A value that the the mock object will return when + :param return_value: A value that the mock object will return when called. :returns: A mock object that behaves as a coroutine which returns *return_value* when called. From bb231059b14277c34a8a0331e51406d5abe4f424 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 03:58:05 +0000 Subject: [PATCH 125/144] [PR #8283/54e13b0a backport][3.9] Fix blocking I/O in the event loop while processing files in a post request (#8293) Co-authored-by: J. Nick Koston --- CHANGES/8283.bugfix.rst | 2 ++ aiohttp/test_utils.py | 11 +++++++++-- aiohttp/web_request.py | 10 ++++++---- 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 CHANGES/8283.bugfix.rst diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst new file mode 100644 index 00000000000..d456d59ba8e --- /dev/null +++ b/CHANGES/8283.bugfix.rst @@ -0,0 +1,2 @@ +Fixed blocking I/O in the event loop while processing files in a POST request +-- by :user:`bdraco`. 
diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index b5821a7fb84..a36e8599689 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -594,8 +594,15 @@ def make_mocked_request( """ task = mock.Mock() if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () if version < HttpVersion(1, 1): closing = True diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 781713e5985..4bc670a798c 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -726,19 +726,21 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": # https://tools.ietf.org/html/rfc7578#section-4.4 if field.filename: # store file in temp file - tmp = tempfile.TemporaryFile() + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) - tmp.write(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: - tmp.close() + await self._loop.run_in_executor(None, tmp.close) raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) - tmp.seek(0) + await self._loop.run_in_executor(None, tmp.seek, 0) if field_ct is None: field_ct = "application/octet-stream" From ef06656568de70520be7c4a555f7ea896d43194b Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 04:04:38 +0000 Subject: [PATCH 126/144] [PR #8283/54e13b0a backport][3.10] Fix blocking I/O in the event loop while processing files in a post request (#8294) Co-authored-by: J. 
Nick Koston --- CHANGES/8283.bugfix.rst | 2 ++ aiohttp/test_utils.py | 11 +++++++++-- aiohttp/web_request.py | 10 ++++++---- 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 CHANGES/8283.bugfix.rst diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst new file mode 100644 index 00000000000..d456d59ba8e --- /dev/null +++ b/CHANGES/8283.bugfix.rst @@ -0,0 +1,2 @@ +Fixed blocking I/O in the event loop while processing files in a POST request +-- by :user:`bdraco`. diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index b5821a7fb84..a36e8599689 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -594,8 +594,15 @@ def make_mocked_request( """ task = mock.Mock() if loop is ...: - loop = mock.Mock() - loop.create_future.return_value = () + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () if version < HttpVersion(1, 1): closing = True diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 781713e5985..4bc670a798c 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -726,19 +726,21 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": # https://tools.ietf.org/html/rfc7578#section-4.4 if field.filename: # store file in temp file - tmp = tempfile.TemporaryFile() + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) - tmp.write(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) size += len(chunk) if 0 < max_size < size: - tmp.close() + await self._loop.run_in_executor(None, tmp.close) raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) - tmp.seek(0) + await 
self._loop.run_in_executor(None, tmp.seek, 0) if field_ct is None: field_ct = "application/octet-stream" From 38dd9b8557f35bdfc1376e5833fb8e235c9d49ba Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 4 Apr 2024 18:11:40 -1000 Subject: [PATCH 127/144] Fix AsyncResolver to match ThreadedResolver behavior (#8270) (#8295) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sviatoslav Sydorenko (Святослав Сидоренко) --- CHANGES/8270.bugfix.rst | 9 ++ aiohttp/abc.py | 28 ++++- aiohttp/connector.py | 16 +-- aiohttp/resolver.py | 94 +++++++++----- docs/abc.rst | 54 ++++++++ docs/conf.py | 3 +- examples/fake_server.py | 10 +- requirements/runtime-deps.in | 2 +- setup.cfg | 2 +- tests/test_resolver.py | 236 +++++++++++++++++++++++++---------- 10 files changed, 343 insertions(+), 111 deletions(-) create mode 100644 CHANGES/8270.bugfix.rst diff --git a/CHANGES/8270.bugfix.rst b/CHANGES/8270.bugfix.rst new file mode 100644 index 00000000000..bda77223959 --- /dev/null +++ b/CHANGES/8270.bugfix.rst @@ -0,0 +1,9 @@ +Fix ``AsyncResolver`` to match ``ThreadedResolver`` behavior +-- by :user:`bdraco`. + +On system with IPv6 support, the :py:class:`~aiohttp.resolver.AsyncResolver` would not fallback +to providing A records when AAAA records were not available. +Additionally, unlike the :py:class:`~aiohttp.resolver.ThreadedResolver`, the :py:class:`~aiohttp.resolver.AsyncResolver` +did not handle link-local addresses correctly. + +This change makes the behavior consistent with the :py:class:`~aiohttp.resolver.ThreadedResolver`. 
diff --git a/aiohttp/abc.py b/aiohttp/abc.py index ee838998997..b6c0514a615 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -1,5 +1,6 @@ import asyncio import logging +import socket from abc import ABC, abstractmethod from collections.abc import Sized from http.cookies import BaseCookie, Morsel @@ -14,6 +15,7 @@ List, Optional, Tuple, + TypedDict, ) from multidict import CIMultiDict @@ -119,11 +121,35 @@ def __await__(self) -> Generator[Any, None, StreamResponse]: """Execute the view handler.""" +class ResolveResult(TypedDict): + """Resolve result. + + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. + """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + class AbstractResolver(ABC): """Abstract DNS resolver.""" @abstractmethod - async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: + async def resolve( + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[ResolveResult]: """Return IP address for given hostname""" @abstractmethod diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 64c678d4b78..90f7c3bb8c0 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -34,7 +34,7 @@ import attr from . 
import hdrs, helpers -from .abc import AbstractResolver +from .abc import AbstractResolver, ResolveResult from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, @@ -693,14 +693,14 @@ async def _create_connection( class _DNSCacheTable: def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {} + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} self._timestamps: Dict[Tuple[str, int], float] = {} self._ttl = ttl def __contains__(self, host: object) -> bool: return host in self._addrs_rr - def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None: + def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: self._addrs_rr[key] = (cycle(addrs), len(addrs)) if self._ttl is not None: @@ -716,7 +716,7 @@ def clear(self) -> None: self._addrs_rr.clear() self._timestamps.clear() - def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]: + def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: loop, length = self._addrs_rr[key] addrs = list(islice(loop, length)) # Consume one more element to shift internal state of `cycle` @@ -834,7 +834,7 @@ def clear_dns_cache( async def _resolve_host( self, host: str, port: int, traces: Optional[List["Trace"]] = None - ) -> List[Dict[str, Any]]: + ) -> List[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): return [ @@ -890,7 +890,7 @@ async def _resolve_host( return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: - def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None: + def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: with suppress(Exception, asyncio.CancelledError): fut.result() @@ -903,7 +903,7 @@ async def _resolve_host_with_throttle( host: str, port: int, traces: Optional[List["Trace"]], - ) -> List[Dict[str, Any]]: + ) -> 
List[ResolveResult]: """Resolve host with a dns events throttle.""" if key in self._throttle_dns_events: # get event early, before any await (#4014) @@ -1217,7 +1217,7 @@ async def _start_tls_connection( return tls_transport, tls_proto def _convert_hosts_to_addr_infos( - self, hosts: List[Dict[str, Any]] + self, hosts: List[ResolveResult] ) -> List[aiohappyeyeballs.AddrInfoType]: """Converts the list of hosts to a list of addr_infos. diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 6c17b1e7e89..ad502c5e5c8 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,8 +1,9 @@ import asyncio import socket -from typing import Any, Dict, List, Optional, Type, Union +import sys +from typing import Any, Dict, List, Optional, Tuple, Type, Union -from .abc import AbstractResolver +from .abc import AbstractResolver, ResolveResult from .helpers import get_running_loop __all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver") @@ -10,12 +11,16 @@ try: import aiodns - # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname') + # aiodns_default = hasattr(aiodns.DNSResolver, 'getaddrinfo') except ImportError: # pragma: no cover aiodns = None + aiodns_default = False +_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV +_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + class ThreadedResolver(AbstractResolver): """Threaded resolver. 
@@ -28,45 +33,45 @@ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: self._loop = get_running_loop(loop) async def resolve( - self, hostname: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + self, host: str, port: int = 0, family: int = socket.AF_INET + ) -> List[ResolveResult]: infos = await self._loop.getaddrinfo( - hostname, + host, port, type=socket.SOCK_STREAM, family=family, flags=socket.AI_ADDRCONFIG, ) - hosts = [] + hosts: List[ResolveResult] = [] for family, _, proto, _, address in infos: if family == socket.AF_INET6: if len(address) < 3: # IPv6 is not supported by Python build, # or IPv6 is not enabled in the host continue - if address[3]: + if address[3] and _SUPPORTS_SCOPE_ID: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. - host, _port = socket.getnameinfo( - address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV + resolved_host, _port = await self._loop.getnameinfo( + address, _NUMERIC_SOCKET_FLAGS ) port = int(_port) else: - host, port = address[:2] + resolved_host, port = address[:2] else: # IPv4 assert family == socket.AF_INET - host, port = address # type: ignore[misc] + resolved_host, port = address # type: ignore[misc] hosts.append( - { - "hostname": hostname, - "host": host, - "port": port, - "family": family, - "proto": proto, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=proto, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) return hosts @@ -96,23 +101,48 @@ def __init__( async def resolve( self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + ) -> List[ResolveResult]: try: - resp = await self._resolver.gethostbyname(host, family) + resp = await self._resolver.getaddrinfo( + host, + port=port, + type=socket.SOCK_STREAM, 
+ family=family, + flags=socket.AI_ADDRCONFIG, + ) except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" raise OSError(msg) from exc - hosts = [] - for address in resp.addresses: + hosts: List[ResolveResult] = [] + for node in resp.nodes: + address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr + family = node.family + if family == socket.AF_INET6: + if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + # This is essential for link-local IPv6 addresses. + # LL IPv6 is a VERY rare case. Strictly speaking, we should use + # getnameinfo() unconditionally, but performance makes sense. + result = await self._resolver.getnameinfo( + (address[0].decode("ascii"), *address[1:]), + _NUMERIC_SOCKET_FLAGS, + ) + resolved_host = result.node + else: + resolved_host = address[0].decode("ascii") + port = address[1] + else: # IPv4 + assert family == socket.AF_INET + resolved_host = address[0].decode("ascii") + port = address[1] hosts.append( - { - "hostname": host, - "host": address, - "port": port, - "family": family, - "proto": 0, - "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV, - } + ResolveResult( + hostname=host, + host=resolved_host, + port=port, + family=family, + proto=0, + flags=_NUMERIC_SOCKET_FLAGS, + ) ) if not hosts: diff --git a/docs/abc.rst b/docs/abc.rst index d2695673fcf..4eea6715991 100644 --- a/docs/abc.rst +++ b/docs/abc.rst @@ -181,3 +181,57 @@ Abstract Access Logger :param response: :class:`aiohttp.web.Response` object. :param float time: Time taken to serve the request. + + +Abstract Resolver +------------------------------- + +.. class:: AbstractResolver + + An abstract class, base for all resolver implementations. + + Method ``resolve`` should be overridden. + + .. method:: resolve(host, port, family) + + Resolve host name to IP address. + + :param str host: host name to resolve. + + :param int port: port number. + + :param int family: socket family. 
+ + :return: list of :class:`aiohttp.abc.ResolveResult` instances. + + .. method:: close() + + Release resolver. + +.. class:: ResolveResult + + Result of host name resolution. + + .. attribute:: hostname + + The host name that was provided. + + .. attribute:: host + + The IP address that was resolved. + + .. attribute:: port + + The port that was resolved. + + .. attribute:: family + + The address family that was resolved. + + .. attribute:: proto + + The protocol that was resolved. + + .. attribute:: flags + + The flags that were resolved. diff --git a/docs/conf.py b/docs/conf.py index f21366fb488..c834296ceeb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -394,7 +394,8 @@ ("py:class", "aiohttp.protocol.HttpVersion"), # undocumented ("py:class", "aiohttp.ClientRequest"), # undocumented ("py:class", "aiohttp.payload.Payload"), # undocumented - ("py:class", "aiohttp.abc.AbstractResolver"), # undocumented + ("py:class", "aiohttp.resolver.AsyncResolver"), # undocumented + ("py:class", "aiohttp.resolver.ThreadedResolver"), # undocumented ("py:func", "aiohttp.ws_connect"), # undocumented ("py:meth", "start"), # undocumented ("py:exc", "aiohttp.ClientHttpProxyError"), # undocumented diff --git a/examples/fake_server.py b/examples/fake_server.py index 3157bab658c..4f796d42386 100755 --- a/examples/fake_server.py +++ b/examples/fake_server.py @@ -3,10 +3,11 @@ import pathlib import socket import ssl +from typing import List, Union import aiohttp from aiohttp import web -from aiohttp.abc import AbstractResolver +from aiohttp.abc import AbstractResolver, ResolveResult from aiohttp.resolver import DefaultResolver from aiohttp.test_utils import unused_port @@ -19,7 +20,12 @@ def __init__(self, fakes, *, loop): self._fakes = fakes self._resolver = DefaultResolver(loop=loop) - async def resolve(self, host, port=0, family=socket.AF_INET): + async def resolve( + self, + host: str, + port: int = 0, + family: Union[socket.AddressFamily, int] = socket.AF_INET, + ) -> 
List[ResolveResult]: fake_port = self._fakes.get(host) if fake_port is not None: return [ diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index 70bd75bd99d..2299584a463 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,6 +1,6 @@ # Extracted from `setup.cfg` via `make sync-direct-runtime-deps` -aiodns; sys_platform=="linux" or sys_platform=="darwin" +aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" aiohappyeyeballs >= 2.3.0 aiosignal >= 1.1.2 async-timeout >= 4.0, < 5.0 ; python_version < "3.11" diff --git a/setup.cfg b/setup.cfg index f407fbf901d..02a5d54d114 100644 --- a/setup.cfg +++ b/setup.cfg @@ -65,7 +65,7 @@ install_requires = [options.extras_require] speedups = # required c-ares (aiodns' backend) will not build on windows - aiodns; sys_platform=="linux" or sys_platform=="darwin" + aiodns >= 3.2.0; sys_platform=="linux" or sys_platform=="darwin" Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 1b389f3601b..2650ccadd6e 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -1,25 +1,57 @@ import asyncio import ipaddress import socket -from typing import Any, List +from ipaddress import ip_address +from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union from unittest.mock import Mock, patch import pytest -from aiohttp.resolver import AsyncResolver, DefaultResolver, ThreadedResolver +from aiohttp.resolver import ( + _NUMERIC_SOCKET_FLAGS, + _SUPPORTS_SCOPE_ID, + AsyncResolver, + DefaultResolver, + ThreadedResolver, +) try: import aiodns - gethostbyname = hasattr(aiodns.DNSResolver, "gethostbyname") + getaddrinfo: Any = hasattr(aiodns.DNSResolver, "getaddrinfo") except ImportError: aiodns = None - gethostbyname = False + getaddrinfo = False -class FakeResult: - def __init__(self, addresses): - self.addresses = addresses 
+class FakeAIODNSAddrInfoNode(NamedTuple): + + family: int + addr: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] + + +class FakeAIODNSAddrInfoIPv4Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode(socket.AF_INET, (h.encode(), 0)) for h in hosts + ] + + +class FakeAIODNSAddrInfoIPv6Result: + def __init__(self, hosts: Collection[str]) -> None: + self.nodes = [ + FakeAIODNSAddrInfoNode( + socket.AF_INET6, + (h.encode(), 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + +class FakeAIODNSNameInfoIPv6Result: + def __init__(self, host: str) -> None: + self.node = host + self.service = None class FakeQueryResult: @@ -27,16 +59,30 @@ def __init__(self, host): self.host = host -async def fake_result(addresses): - return FakeResult(addresses=tuple(addresses)) +async def fake_aiodns_getaddrinfo_ipv4_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv4Result: + return FakeAIODNSAddrInfoIPv4Result(hosts=hosts) + + +async def fake_aiodns_getaddrinfo_ipv6_result( + hosts: Collection[str], +) -> FakeAIODNSAddrInfoIPv6Result: + return FakeAIODNSAddrInfoIPv6Result(hosts=hosts) + + +async def fake_aiodns_getnameinfo_ipv6_result( + host: str, +) -> FakeAIODNSNameInfoIPv6Result: + return FakeAIODNSNameInfoIPv6Result(host) async def fake_query_result(result): return [FakeQueryResult(host=h) for h in result] -def fake_addrinfo(hosts): - async def fake(*args, **kwargs): +def fake_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: if not hosts: raise socket.gaierror @@ -45,33 +91,83 @@ async def fake(*args, **kwargs): return fake -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_positive_lookup(loop) -> None: +def fake_ipv6_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> List[Any]: + if not hosts: + raise 
socket.gaierror + + return [ + ( + socket.AF_INET6, + None, + socket.SOCK_STREAM, + None, + (h, 0, 0, 3 if ip_address(h).is_link_local else 0), + ) + for h in hosts + ] + + return fake + + +def fake_ipv6_nameinfo(host: str) -> Callable[..., Awaitable[Any]]: + async def fake(*args: Any, **kwargs: Any) -> Tuple[str, int]: + return host, 0 + + return fake + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result( + ["127.0.0.1"] + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET) - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_positive_lookup(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result(["127.0.0.1"]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( + ["fe80::1"] + ) + mock().getnameinfo.return_value = fake_aiodns_getnameinfo_ipv6_result( + "fe80::1%eth0" + ) + resolver = AsyncResolver() real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - 
mock().query.assert_called_with("www.python.org", "A") - - -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_multiple_replies(loop) -> None: + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) + mock().getnameinfo.assert_called_with( + ("fe80::1", 0, 0, 3), _NUMERIC_SOCKET_FLAGS + ) + + +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_multiple_replies(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] - mock().gethostbyname.return_value = fake_result(ips) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result(ips) + resolver = AsyncResolver() real = await resolver.resolve("www.google.com") ips = [ipaddress.ip_address(x["host"]) for x in real] assert len(ips) > 3, "Expecting multiple addresses" @@ -88,40 +184,20 @@ async def test_async_resolver_query_multiple_replies(loop) -> None: ips = [ipaddress.ip_address(x["host"]) for x in real] -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_negative_lookup(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_query_negative_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_negative_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.side_effect = aiodns.error.DNSError() - resolver = AsyncResolver(loop=loop) - with pytest.raises(OSError): - 
await resolver.resolve("doesnotexist.bla") - - -@pytest.mark.skipif(aiodns is None, reason="aiodns required") -async def test_async_resolver_no_hosts_in_query(loop) -> None: - with patch("aiodns.DNSResolver") as mock: - del mock().gethostbyname - mock().query.return_value = fake_query_result([]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.side_effect = aiodns.error.DNSError() + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_no_hosts_in_gethostbyname(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_no_hosts_in_getaddrinfo(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result([]) - resolver = AsyncResolver(loop=loop) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv4_result([]) + resolver = AsyncResolver() with pytest.raises(OSError): await resolver.resolve("doesnotexist.bla") @@ -135,6 +211,20 @@ async def test_threaded_resolver_positive_lookup() -> None: ipaddress.ip_address(real[0]["host"]) +@pytest.mark.skipif( + not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" +) +async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: + loop = Mock() + loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) + loop.getnameinfo = fake_ipv6_nameinfo("fe80::1%eth0") + resolver = ThreadedResolver() + resolver._loop = loop + real = await resolver.resolve("www.python.org") + assert real[0]["hostname"] == "www.python.org" + ipaddress.ip_address(real[0]["host"]) + + async def test_threaded_resolver_multiple_replies() -> None: loop = Mock() ips = ["127.0.0.1", "127.0.0.2", "127.0.0.3", "127.0.0.4"] @@ -154,6 +244,16 @@ async def test_threaded_negative_lookup() -> None: await resolver.resolve("doesnotexist.bla") +async def 
test_threaded_negative_ipv6_lookup() -> None: + loop = Mock() + ips: List[Any] = [] + loop.getaddrinfo = fake_ipv6_addrinfo(ips) + resolver = ThreadedResolver() + resolver._loop = loop + with pytest.raises(socket.gaierror): + await resolver.resolve("doesnotexist.bla") + + async def test_threaded_negative_lookup_with_unknown_result() -> None: loop = Mock() @@ -202,14 +302,20 @@ async def test_default_loop_for_async_resolver(loop) -> None: assert resolver._loop is loop -@pytest.mark.skipif(not gethostbyname, reason="aiodns 1.1 required") -async def test_async_resolver_ipv6_positive_lookup(loop) -> None: +@pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") +async def test_async_resolver_ipv6_positive_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: - mock().gethostbyname.return_value = fake_result(["::1"]) - resolver = AsyncResolver(loop=loop) - real = await resolver.resolve("www.python.org", family=socket.AF_INET6) + mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result(["::1"]) + resolver = AsyncResolver() + real = await resolver.resolve("www.python.org") ipaddress.ip_address(real[0]["host"]) - mock().gethostbyname.assert_called_with("www.python.org", socket.AF_INET6) + mock().getaddrinfo.assert_called_with( + "www.python.org", + family=socket.AF_INET, + flags=socket.AI_ADDRCONFIG, + port=0, + type=socket.SOCK_STREAM, + ) @pytest.mark.skipif(aiodns is None, reason="aiodns required") @@ -230,7 +336,7 @@ async def test_async_resolver_aiodns_not_present(loop, monkeypatch) -> None: def test_default_resolver() -> None: - # if gethostbyname: + # if getaddrinfo: # assert DefaultResolver is AsyncResolver # else: # assert DefaultResolver is ThreadedResolver From d15f07cfbbdecf3de8ca1db10ca5d182ace7b09c Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 5 Apr 2024 19:50:29 +0100 Subject: [PATCH 128/144] Upgrade to llhttp 9.2.1 (#8292) (#8297) Fixes #8291. 
(cherry picked from commit 4d72dca6869072fb073621f8b752225e216a92d9) --- CHANGES/8292.feature.rst | 1 + aiohttp/http_parser.py | 8 +++-- tests/test_http_parser.py | 65 +++++++++++++++++++++++++++++---------- vendor/llhttp | 2 +- 4 files changed, 57 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8292.feature.rst diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst new file mode 100644 index 00000000000..6ca82503143 --- /dev/null +++ b/CHANGES/8292.feature.rst @@ -0,0 +1 @@ +Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 8bd8519ff6b..cce0b788d46 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -128,10 +128,12 @@ def __init__( max_line_size: int = 8190, max_headers: int = 32768, max_field_size: int = 8190, + lax: bool = False, ) -> None: self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size + self._lax = lax def parse_headers( self, lines: List[bytes] @@ -178,7 +180,7 @@ def parse_headers( line = lines[lines_idx] # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding if continuation: @@ -273,7 +275,9 @@ def __init__( self._payload_parser: Optional[HttpPayloadParser] = None self._auto_decompress = auto_decompress self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) @abc.abstractmethod def parse_message(self, lines: List[bytes]) -> _MsgT: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index da7f1182b3a..04b254c0ae8 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py 
@@ -108,8 +108,7 @@ def test_c_parser_loaded(): def test_parse_headers(parser: Any) -> None: text = b"""GET /test HTTP/1.1\r -test: line\r - continue\r +test: a line\r test2: data\r \r """ @@ -117,13 +116,24 @@ def test_parse_headers(parser: Any) -> None: assert len(messages) == 1 msg = messages[0][0] - assert list(msg.headers.items()) == [("test", "line continue"), ("test2", "data")] - assert msg.raw_headers == ((b"test", b"line continue"), (b"test2", b"data")) + assert list(msg.headers.items()) == [("test", "a line"), ("test2", "data")] + assert msg.raw_headers == ((b"test", b"a line"), (b"test2", b"data")) assert not msg.should_close assert msg.compression is None assert not msg.upgrade +def test_reject_obsolete_line_folding(parser: Any) -> None: + text = b"""GET /test HTTP/1.1\r +test: line\r + Content-Length: 48\r +test2: data\r +\r +""" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + @pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None: parser = HttpRequestParserC( @@ -353,8 +363,8 @@ def test_parse_delayed(parser) -> None: def test_headers_multi_feed(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" - text2 = b"test: line\r" - text3 = b"\n continue\r\n\r\n" + text2 = b"test: line" + text3 = b" continue\r\n\r\n" messages, upgrade, tail = parser.feed_data(text1) assert len(messages) == 0 @@ -713,31 +723,30 @@ def test_max_header_value_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40965, 8191]) -def test_max_header_value_size_continuation(parser, size) -> None: +def test_max_header_value_size_continuation(response, size) -> None: name = b"T" * (size - 5) - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, 
match=match): - parser.feed_data(text) + response.feed_data(text) -def test_max_header_value_size_continuation_under_limit(parser) -> None: +def test_max_header_value_size_continuation_under_limit(response) -> None: value = b"A" * 8185 - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + value + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n" - messages, upgrade, tail = parser.feed_data(text) + messages, upgrade, tail = response.feed_data(text) msg = messages[0][0] - assert msg.method == "GET" - assert msg.path == "/test" + assert msg.code == 200 + assert msg.reason == "Ok" assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - assert not msg.should_close + # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url == URL("/test") def test_http_request_parser(parser) -> None: @@ -991,6 +1000,30 @@ def test_http_response_parser_utf8_without_reason(response: Any) -> None: assert not tail +def test_http_response_parser_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + messages, upgraded, tail = response.feed_data(text) + assert len(messages) == 1 + msg = messages[0][0] + + assert msg.version == (1, 1) + assert msg.code == 200 + assert msg.reason == "Ok" + assert msg.headers == CIMultiDict([("TEST", "line continue")]) + assert msg.raw_headers == ((b"test", b"line continue"),) + assert not upgraded + assert not tail + + +@pytest.mark.dev_mode +def test_http_response_parser_strict_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + with pytest.raises(http_exceptions.BadHttpMessage): + response.feed_data(text) + + @pytest.mark.parametrize("size", [40962, 8191]) def 
test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) diff --git a/vendor/llhttp b/vendor/llhttp index 533845688d1..b0b279fb5a6 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 533845688d173561b9cba33269130401add38567 +Subproject commit b0b279fb5a617ab3bc2fc11c5f8bd937aac687c1 From 270ae9cf6a9e6159b5e29a950deb6ff7600aebc5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 21:31:25 +0100 Subject: [PATCH 129/144] [PR #8297/d15f07cf backport][3.9] Upgrade to llhttp 9.2.1 (#8292) (#8298) **This is a backport of PR #8297 as merged into 3.10 (d15f07cfbbdecf3de8ca1db10ca5d182ace7b09c).** Fixes #8291. (cherry picked from commit 4d72dca6869072fb073621f8b752225e216a92d9) Co-authored-by: Sam Bull --- CHANGES/8292.feature.rst | 1 + aiohttp/http_parser.py | 8 +++-- tests/test_http_parser.py | 65 +++++++++++++++++++++++++++++---------- vendor/llhttp | 2 +- 4 files changed, 57 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8292.feature.rst diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst new file mode 100644 index 00000000000..6ca82503143 --- /dev/null +++ b/CHANGES/8292.feature.rst @@ -0,0 +1 @@ +Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index 8bd8519ff6b..cce0b788d46 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -128,10 +128,12 @@ def __init__( max_line_size: int = 8190, max_headers: int = 32768, max_field_size: int = 8190, + lax: bool = False, ) -> None: self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size + self._lax = lax def parse_headers( self, lines: List[bytes] @@ -178,7 +180,7 @@ def parse_headers( line = lines[lines_idx] # consume continuation lines - continuation = line and line[0] in (32, 9) # (' ', '\t') + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding if continuation: @@ -273,7 +275,9 @@ def __init__( self._payload_parser: Optional[HttpPayloadParser] = None self._auto_decompress = auto_decompress self._limit = limit - self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size) + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) @abc.abstractmethod def parse_message(self, lines: List[bytes]) -> _MsgT: diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index a37a08632d7..d76bb64bab5 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -108,8 +108,7 @@ def test_c_parser_loaded(): def test_parse_headers(parser: Any) -> None: text = b"""GET /test HTTP/1.1\r -test: line\r - continue\r +test: a line\r test2: data\r \r """ @@ -117,13 +116,24 @@ def test_parse_headers(parser: Any) -> None: assert len(messages) == 1 msg = messages[0][0] - assert list(msg.headers.items()) == [("test", "line continue"), ("test2", "data")] - assert msg.raw_headers == ((b"test", b"line continue"), (b"test2", b"data")) + assert list(msg.headers.items()) == [("test", "a line"), ("test2", "data")] + assert msg.raw_headers == ((b"test", b"a line"), (b"test2", b"data")) assert 
not msg.should_close assert msg.compression is None assert not msg.upgrade +def test_reject_obsolete_line_folding(parser: Any) -> None: + text = b"""GET /test HTTP/1.1\r +test: line\r + Content-Length: 48\r +test2: data\r +\r +""" + with pytest.raises(http_exceptions.BadHttpMessage): + parser.feed_data(text) + + @pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.") def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None: parser = HttpRequestParserC( @@ -342,8 +352,8 @@ def test_parse_delayed(parser) -> None: def test_headers_multi_feed(parser) -> None: text1 = b"GET /test HTTP/1.1\r\n" - text2 = b"test: line\r" - text3 = b"\n continue\r\n\r\n" + text2 = b"test: line" + text3 = b" continue\r\n\r\n" messages, upgrade, tail = parser.feed_data(text1) assert len(messages) == 0 @@ -705,31 +715,30 @@ def test_max_header_value_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40965, 8191]) -def test_max_header_value_size_continuation(parser, size) -> None: +def test_max_header_value_size_continuation(response, size) -> None: name = b"T" * (size - 5) - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): - parser.feed_data(text) + response.feed_data(text) -def test_max_header_value_size_continuation_under_limit(parser) -> None: +def test_max_header_value_size_continuation_under_limit(response) -> None: value = b"A" * 8185 - text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + value + b"\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n" - messages, upgrade, tail = parser.feed_data(text) + messages, upgrade, tail = response.feed_data(text) msg = messages[0][0] - assert msg.method == "GET" - assert msg.path == "/test" + assert msg.code == 200 + assert msg.reason == "Ok" assert 
msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - assert not msg.should_close + # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 assert msg.compression is None assert not msg.upgrade assert not msg.chunked - assert msg.url == URL("/test") def test_http_request_parser(parser) -> None: @@ -970,6 +979,30 @@ def test_http_response_parser_utf8_without_reason(response: Any) -> None: assert not tail +def test_http_response_parser_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + messages, upgraded, tail = response.feed_data(text) + assert len(messages) == 1 + msg = messages[0][0] + + assert msg.version == (1, 1) + assert msg.code == 200 + assert msg.reason == "Ok" + assert msg.headers == CIMultiDict([("TEST", "line continue")]) + assert msg.raw_headers == ((b"test", b"line continue"),) + assert not upgraded + assert not tail + + +@pytest.mark.dev_mode +def test_http_response_parser_strict_obs_line_folding(response: Any) -> None: + text = b"HTTP/1.1 200 Ok\r\ntest: line\r\n continue\r\n\r\n" + + with pytest.raises(http_exceptions.BadHttpMessage): + response.feed_data(text) + + @pytest.mark.parametrize("size", [40962, 8191]) def test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) diff --git a/vendor/llhttp b/vendor/llhttp index 533845688d1..b0b279fb5a6 160000 --- a/vendor/llhttp +++ b/vendor/llhttp @@ -1 +1 @@ -Subproject commit 533845688d173561b9cba33269130401add38567 +Subproject commit b0b279fb5a617ab3bc2fc11c5f8bd937aac687c1 From 767c413093fff1406d01f914a78c39b0cc7ae622 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 7 Apr 2024 12:50:53 +0100 Subject: [PATCH 130/144] Fix handling of multipart/form-data (#8280) (#8301) https://datatracker.ietf.org/doc/html/rfc7578 (cherry picked from commit 
7d0be3fee540a3d4161ac7dc76422f1f5ea60104) --- CHANGES/8280.bugfix.rst | 1 + CHANGES/8280.deprecation.rst | 2 + aiohttp/formdata.py | 12 +++- aiohttp/multipart.py | 121 +++++++++++++++++++++----------- tests/test_client_functional.py | 44 +----------- tests/test_multipart.py | 68 ++++++++++++++---- tests/test_web_functional.py | 27 ++----- 7 files changed, 155 insertions(+), 120 deletions(-) create mode 100644 CHANGES/8280.bugfix.rst create mode 100644 CHANGES/8280.deprecation.rst diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst new file mode 100644 index 00000000000..3aebe36fe9e --- /dev/null +++ b/CHANGES/8280.bugfix.rst @@ -0,0 +1 @@ +Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst new file mode 100644 index 00000000000..302dbb2fe2a --- /dev/null +++ b/CHANGES/8280.deprecation.rst @@ -0,0 +1,2 @@ +Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() +` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index e7cd24ca9f7..2b75b3de72c 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -1,4 +1,5 @@ import io +import warnings from typing import Any, Iterable, List, Optional from urllib.parse import urlencode @@ -53,7 +54,12 @@ def add_field( if isinstance(value, io.IOBase): self._is_multipart = True elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) filename = name type_options: MultiDict[str] = MultiDict({"name": name}) @@ -81,7 +87,11 @@ def add_field( "content_transfer_encoding must be an instance" " of str. 
Got: %s" % content_transfer_encoding ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." + ) + warnings.warn(msg, DeprecationWarning) self._is_multipart = True self._fields.append((type_options, headers, value)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 4471dd4bb7e..a43ec545713 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -256,13 +256,22 @@ class BodyPartReader: chunk_size = 8192 def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + self, + boundary: bytes, + headers: "CIMultiDictProxy[str]", + content: StreamReader, + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, ) -> None: self.headers = headers self._boundary = boundary self._content = content + self._default_charset = default_charset self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) + self._is_form_data = subtype == "form-data" + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) self._length = int(length) if length is not None else None self._read_bytes = 0 self._unread: Deque[bytes] = deque() @@ -329,6 +338,8 @@ async def _read_chunk_from_length(self, size: int) -> bytes: assert self._length is not None, "Content-Length required for chunked read" chunk_size = min(size, self._length - self._read_bytes) chunk = await self._content.read(chunk_size) + if self._content.at_eof(): + self._at_eof = True return chunk async def _read_chunk_from_stream(self, size: int) -> bytes: @@ -449,7 +460,8 @@ def decode(self, data: bytes) -> bytes: """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and 
CONTENT_ENCODING in self.headers: return self._decode_content(data) return data @@ -483,7 +495,7 @@ def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = self.headers.get(CONTENT_TYPE, "") mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) + return mimetype.parameters.get("charset", self._default_charset or default) @reify def name(self) -> Optional[str]: @@ -538,9 +550,17 @@ class MultipartReader: part_reader_cls = BodyPartReader def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + self.headers = headers self._boundary = ("--" + self._get_boundary()).encode() self._content = content + self._default_charset: Optional[str] = None self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None self._at_eof = False self._at_bof = True @@ -592,7 +612,24 @@ async def next( await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return None - self._last_part = await self.fetch_next_part() + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. 
+ charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part return self._last_part async def release(self) -> None: @@ -628,19 +665,16 @@ def _get_part_reader( return type(self)(headers, self._content) return self.multipart_reader_cls(headers, self._content) else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, ) - boundary = mimetype.parameters["boundary"] + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] if len(boundary) > 70: raise ValueError("boundary %r is too long (70 chars max)" % boundary) @@ -731,6 +765,7 @@ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> No super().__init__(None, content_type=ctype) self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" def __enter__(self) -> "MultipartWriter": return self @@ -808,32 +843,36 @@ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Paylo def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" - # compression - encoding: Optional[str] = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding: Optional[str] = 
payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] return payload diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 4d804a31ddc..4a24196a28e 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -1387,48 +1387,6 @@ async def handler(request): resp.close() -async def test_POST_DATA_with_context_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data 
= await request.post() - assert data["name"] == "text" - return web.Response(text=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field("name", "text", content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - -async def test_POST_DATA_with_content_type_context_transfer_encoding(aiohttp_client): - async def handler(request): - data = await request.post() - assert data["name"] == "text" - return web.Response(body=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field( - "name", "text", content_type="text/plain", content_transfer_encoding="base64" - ) - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - async def test_POST_MultiDict(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -1480,7 +1438,7 @@ async def handler(request): with fname.open("rb") as f: async with client.post( - "/", data={"some": f, "test": b"data"}, chunked=True + "/", data={"some": f, "test": io.BytesIO(b"data")}, chunked=True ) as resp: assert 200 == resp.status diff --git a/tests/test_multipart.py b/tests/test_multipart.py index f9d130e7949..dbfaf74b9b7 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -944,6 +944,58 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def test_read_form_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"ascii" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b"Content-Type: 
text/plain;charset=UTF-8\r\n" + b'Content-Disposition: form-data; name="field2"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field3"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + field1 = await reader.next() + assert field1.name == "field1" + assert field1.get_charset("default") == "ascii" + field2 = await reader.next() + assert field2.name == "field2" + assert field2.get_charset("default") == "UTF-8" + field3 = await reader.next() + assert field3.name == "field3" + assert field3.get_charset("default") == "ascii" + + async def test_read_form_invalid_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"this-value-is-too-long-to-be-a-charset" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + with pytest.raises(RuntimeError, match="Invalid default charset"): + await reader.next() + async def test_writer(writer) -> None: assert writer.size == 7 @@ -1280,7 +1332,6 @@ async def test_preserve_content_disposition_header(self, buf, stream): CONTENT_TYPE: "text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1291,9 +1342,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_set_content_disposition_override(self, buf, stream): @@ -1307,7 +1356,6 @@ async def test_set_content_disposition_override(self, buf, stream): CONTENT_TYPE: 
"text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1318,9 +1366,7 @@ async def test_set_content_disposition_override(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_reset_content_disposition_header(self, buf, stream): @@ -1332,8 +1378,6 @@ async def test_reset_content_disposition_header(self, buf, stream): headers={CONTENT_TYPE: "text/plain"}, ) - content_length = part.size - assert CONTENT_DISPOSITION in part.headers part.set_content_disposition("attachments", filename="bug.py") @@ -1346,9 +1390,7 @@ async def test_reset_content_disposition_header(self, buf, stream): b"--:\r\n" b"Content-Type: text/plain\r\n" b"Content-Disposition:" - b' attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b' attachments; filename="bug.py"' ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 04fc2e35fd1..ee61537068b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -48,7 +48,8 @@ def fname(here): def new_dummy_form(): form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") return form @@ -447,25 +448,6 @@ async def handler(request): await resp.release() -async def test_POST_DATA_with_content_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data = await request.post() - assert b"123" == data["name"] - return web.Response() - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = FormData() - form.add_field("name", b"123", 
content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - - await resp.release() - - async def test_post_form_with_duplicate_keys(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -523,7 +505,8 @@ async def handler(request): return web.Response() form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") app = web.Application() app.router.add_post("/", handler) @@ -727,7 +710,7 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - resp = await client.post("/", data={"file": data}) + resp = await client.post("/", data={"file": io.BytesIO(data)}) assert 200 == resp.status await resp.release() From cebe526b9c34dc3a3da9140409db63014bc4cf19 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Sun, 7 Apr 2024 13:19:31 +0100 Subject: [PATCH 131/144] Fix handling of multipart/form-data (#8280) (#8302) https://datatracker.ietf.org/doc/html/rfc7578 (cherry picked from commit 7d0be3fee540a3d4161ac7dc76422f1f5ea60104) --- CHANGES/8280.bugfix.rst | 1 + CHANGES/8280.deprecation.rst | 2 + aiohttp/formdata.py | 12 +++- aiohttp/multipart.py | 121 +++++++++++++++++++++----------- tests/test_client_functional.py | 44 +----------- tests/test_multipart.py | 68 ++++++++++++++---- tests/test_web_functional.py | 27 ++----- 7 files changed, 155 insertions(+), 120 deletions(-) create mode 100644 CHANGES/8280.bugfix.rst create mode 100644 CHANGES/8280.deprecation.rst diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst new file mode 100644 index 00000000000..3aebe36fe9e --- /dev/null +++ b/CHANGES/8280.bugfix.rst @@ -0,0 +1 @@ +Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst new file mode 100644 index 00000000000..302dbb2fe2a --- /dev/null +++ b/CHANGES/8280.deprecation.rst @@ -0,0 +1,2 @@ +Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() +` -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index e7cd24ca9f7..2b75b3de72c 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -1,4 +1,5 @@ import io +import warnings from typing import Any, Iterable, List, Optional from urllib.parse import urlencode @@ -53,7 +54,12 @@ def add_field( if isinstance(value, io.IOBase): self._is_multipart = True elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) filename = name type_options: MultiDict[str] = MultiDict({"name": name}) @@ -81,7 +87,11 @@ def add_field( "content_transfer_encoding must be an instance" " of str. Got: %s" % content_transfer_encoding ) - headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." 
+ ) + warnings.warn(msg, DeprecationWarning) self._is_multipart = True self._fields.append((type_options, headers, value)) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 4471dd4bb7e..a43ec545713 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -256,13 +256,22 @@ class BodyPartReader: chunk_size = 8192 def __init__( - self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader + self, + boundary: bytes, + headers: "CIMultiDictProxy[str]", + content: StreamReader, + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, ) -> None: self.headers = headers self._boundary = boundary self._content = content + self._default_charset = default_charset self._at_eof = False - length = self.headers.get(CONTENT_LENGTH, None) + self._is_form_data = subtype == "form-data" + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) self._length = int(length) if length is not None else None self._read_bytes = 0 self._unread: Deque[bytes] = deque() @@ -329,6 +338,8 @@ async def _read_chunk_from_length(self, size: int) -> bytes: assert self._length is not None, "Content-Length required for chunked read" chunk_size = min(size, self._length - self._read_bytes) chunk = await self._content.read(chunk_size) + if self._content.at_eof(): + self._at_eof = True return chunk async def _read_chunk_from_stream(self, size: int) -> bytes: @@ -449,7 +460,8 @@ def decode(self, data: bytes) -> bytes: """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) - if CONTENT_ENCODING in self.headers: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and CONTENT_ENCODING in self.headers: return self._decode_content(data) return data @@ -483,7 +495,7 @@ def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = 
self.headers.get(CONTENT_TYPE, "") mimetype = parse_mimetype(ctype) - return mimetype.parameters.get("charset", default) + return mimetype.parameters.get("charset", self._default_charset or default) @reify def name(self) -> Optional[str]: @@ -538,9 +550,17 @@ class MultipartReader: part_reader_cls = BodyPartReader def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + self.headers = headers self._boundary = ("--" + self._get_boundary()).encode() self._content = content + self._default_charset: Optional[str] = None self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None self._at_eof = False self._at_bof = True @@ -592,7 +612,24 @@ async def next( await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return None - self._last_part = await self.fetch_next_part() + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. 
+ charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part return self._last_part async def release(self) -> None: @@ -628,19 +665,16 @@ def _get_part_reader( return type(self)(headers, self._content) return self.multipart_reader_cls(headers, self._content) else: - return self.part_reader_cls(self._boundary, headers, self._content) - - def _get_boundary(self) -> str: - mimetype = parse_mimetype(self.headers[CONTENT_TYPE]) - - assert mimetype.type == "multipart", "multipart/* content type expected" - - if "boundary" not in mimetype.parameters: - raise ValueError( - "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE] + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, ) - boundary = mimetype.parameters["boundary"] + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] if len(boundary) > 70: raise ValueError("boundary %r is too long (70 chars max)" % boundary) @@ -731,6 +765,7 @@ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> No super().__init__(None, content_type=ctype) self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" def __enter__(self) -> "MultipartWriter": return self @@ -808,32 +843,36 @@ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Paylo def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" - # compression - encoding: Optional[str] = payload.headers.get( - CONTENT_ENCODING, - "", - ).lower() - if encoding and encoding not in ("deflate", "gzip", "identity"): - raise RuntimeError(f"unknown content encoding: {encoding}") - if encoding == "identity": - encoding = None - - # te encoding - te_encoding: Optional[str] = 
payload.headers.get( - CONTENT_TRANSFER_ENCODING, - "", - ).lower() - if te_encoding not in ("", "base64", "quoted-printable", "binary"): - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(te_encoding) + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert CONTENT_DISPOSITION in payload.headers + assert "name=" in payload.headers[CONTENT_DISPOSITION] + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() ) - if te_encoding == "binary": - te_encoding = None - - # size - size = payload.size - if size is not None and not (encoding or te_encoding): - payload.headers[CONTENT_LENGTH] = str(size) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", "quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] return payload diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 8a9a4e184be..dbb2dff5ac4 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -1317,48 +1317,6 @@ async def handler(request): resp.close() -async def test_POST_DATA_with_context_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data 
= await request.post() - assert data["name"] == "text" - return web.Response(text=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field("name", "text", content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - -async def test_POST_DATA_with_content_type_context_transfer_encoding(aiohttp_client): - async def handler(request): - data = await request.post() - assert data["name"] == "text" - return web.Response(body=data["name"]) - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = aiohttp.FormData() - form.add_field( - "name", "text", content_type="text/plain", content_transfer_encoding="base64" - ) - - resp = await client.post("/", data=form) - assert 200 == resp.status - content = await resp.text() - assert content == "text" - resp.close() - - async def test_POST_MultiDict(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -1410,7 +1368,7 @@ async def handler(request): with fname.open("rb") as f: async with client.post( - "/", data={"some": f, "test": b"data"}, chunked=True + "/", data={"some": f, "test": io.BytesIO(b"data")}, chunked=True ) as resp: assert 200 == resp.status diff --git a/tests/test_multipart.py b/tests/test_multipart.py index f9d130e7949..dbfaf74b9b7 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -944,6 +944,58 @@ async def test_reading_skips_prelude(self) -> None: assert first.at_eof() assert not second.at_eof() + async def test_read_form_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"ascii" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b"Content-Type: 
text/plain;charset=UTF-8\r\n" + b'Content-Disposition: form-data; name="field2"\r\n\r\n' + b"foo" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field3"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + field1 = await reader.next() + assert field1.name == "field1" + assert field1.get_charset("default") == "ascii" + field2 = await reader.next() + assert field2.name == "field2" + assert field2.get_charset("default") == "UTF-8" + field3 = await reader.next() + assert field3.name == "field3" + assert field3.get_charset("default") == "ascii" + + async def test_read_form_invalid_default_encoding(self) -> None: + with Stream( + b"--:\r\n" + b'Content-Disposition: form-data; name="_charset_"\r\n\r\n' + b"this-value-is-too-long-to-be-a-charset" + b"\r\n" + b"--:\r\n" + b'Content-Disposition: form-data; name="field1"\r\n\r\n' + b"foo" + b"\r\n" + ) as stream: + reader = aiohttp.MultipartReader( + {CONTENT_TYPE: 'multipart/form-data;boundary=":"'}, + stream, + ) + with pytest.raises(RuntimeError, match="Invalid default charset"): + await reader.next() + async def test_writer(writer) -> None: assert writer.size == 7 @@ -1280,7 +1332,6 @@ async def test_preserve_content_disposition_header(self, buf, stream): CONTENT_TYPE: "text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1291,9 +1342,7 @@ async def test_preserve_content_disposition_header(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_set_content_disposition_override(self, buf, stream): @@ -1307,7 +1356,6 @@ async def test_set_content_disposition_override(self, buf, stream): CONTENT_TYPE: 
"text/python", }, ) - content_length = part.size await writer.write(stream) assert part.headers[CONTENT_TYPE] == "text/python" @@ -1318,9 +1366,7 @@ async def test_set_content_disposition_override(self, buf, stream): assert headers == ( b"--:\r\n" b"Content-Type: text/python\r\n" - b'Content-Disposition: attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b'Content-Disposition: attachments; filename="bug.py"' ) async def test_reset_content_disposition_header(self, buf, stream): @@ -1332,8 +1378,6 @@ async def test_reset_content_disposition_header(self, buf, stream): headers={CONTENT_TYPE: "text/plain"}, ) - content_length = part.size - assert CONTENT_DISPOSITION in part.headers part.set_content_disposition("attachments", filename="bug.py") @@ -1346,9 +1390,7 @@ async def test_reset_content_disposition_header(self, buf, stream): b"--:\r\n" b"Content-Type: text/plain\r\n" b"Content-Disposition:" - b' attachments; filename="bug.py"\r\n' - b"Content-Length: %s" - b"" % (str(content_length).encode(),) + b' attachments; filename="bug.py"' ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index 04fc2e35fd1..ee61537068b 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -48,7 +48,8 @@ def fname(here): def new_dummy_form(): form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") return form @@ -447,25 +448,6 @@ async def handler(request): await resp.release() -async def test_POST_DATA_with_content_transfer_encoding(aiohttp_client) -> None: - async def handler(request): - data = await request.post() - assert b"123" == data["name"] - return web.Response() - - app = web.Application() - app.router.add_post("/", handler) - client = await aiohttp_client(app) - - form = FormData() - form.add_field("name", b"123", 
content_transfer_encoding="base64") - - resp = await client.post("/", data=form) - assert 200 == resp.status - - await resp.release() - - async def test_post_form_with_duplicate_keys(aiohttp_client) -> None: async def handler(request): data = await request.post() @@ -523,7 +505,8 @@ async def handler(request): return web.Response() form = FormData() - form.add_field("name", b"123", content_transfer_encoding="base64") + with pytest.warns(DeprecationWarning, match="BytesPayload"): + form.add_field("name", b"123", content_transfer_encoding="base64") app = web.Application() app.router.add_post("/", handler) @@ -727,7 +710,7 @@ async def handler(request): app.router.add_post("/", handler) client = await aiohttp_client(app) - resp = await client.post("/", data={"file": data}) + resp = await client.post("/", data={"file": io.BytesIO(data)}) assert 200 == resp.status await resp.release() From 410394b2e20a2045f0d4c9dcc08adbb59fff440a Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 7 Apr 2024 22:27:45 +0100 Subject: [PATCH 132/144] [PR #8304/88c80c14 backport][3.10] Check for backports in CI (#8306) **This is a backport of PR #8304 as merged into master (88c80c146d16d06d78562b803fec7c7b2f849e87).** Co-authored-by: Sam Bull --- .github/workflows/labels.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/labels.yml diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml new file mode 100644 index 00000000000..a4e961e88af --- /dev/null +++ b/.github/workflows/labels.yml @@ -0,0 +1,23 @@ +name: Labels +on: + pull_request: + branches: + - 'master' + types: [labeled, opened, synchronize, reopened, unlabeled] + +jobs: + backport: + runs-on: ubuntu-latest + name: Backport label added + steps: + - uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const pr = await github.rest.pulls.get({ + owner: 
context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + if (!pr.data.labels.find(l => l.name.startsWith("backport"))) + process.exit(1); From 292d961f4ee2829a1b13fad92444a4fd693fbc87 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sun, 7 Apr 2024 22:27:59 +0100 Subject: [PATCH 133/144] [PR #8304/88c80c14 backport][3.9] Check for backports in CI (#8305) **This is a backport of PR #8304 as merged into master (88c80c146d16d06d78562b803fec7c7b2f849e87).** Co-authored-by: Sam Bull --- .github/workflows/labels.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/labels.yml diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml new file mode 100644 index 00000000000..a4e961e88af --- /dev/null +++ b/.github/workflows/labels.yml @@ -0,0 +1,23 @@ +name: Labels +on: + pull_request: + branches: + - 'master' + types: [labeled, opened, synchronize, reopened, unlabeled] + +jobs: + backport: + runs-on: ubuntu-latest + name: Backport label added + steps: + - uses: actions/github-script@v7 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const pr = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.payload.pull_request.number + }); + if (!pr.data.labels.find(l => l.name.startsWith("backport"))) + process.exit(1); From 7853b08e5d71b3b616f6f98b59ca5a5537ead2f9 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 00:00:10 +0100 Subject: [PATCH 134/144] [PR #8299/28d026eb backport][3.10] Create marker for internal tests (#8308) **This is a backport of PR #8299 as merged into master (28d026eb9d0c93389431f8f142a5f7c4448d13f9).** Co-authored-by: Sam Bull --- CHANGES/8299.packaging.rst | 2 ++ setup.cfg | 1 + tests/test_imports.py | 1 + 3 files changed, 4 insertions(+) create 
mode 100644 CHANGES/8299.packaging.rst diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst new file mode 100644 index 00000000000..05abc8237e2 --- /dev/null +++ b/CHANGES/8299.packaging.rst @@ -0,0 +1,2 @@ +Added an ``internal`` pytest marker for tests which should be skipped +by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. diff --git a/setup.cfg b/setup.cfg index 02a5d54d114..cfd1be5610f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -172,3 +172,4 @@ junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. + internal: tests which may cause issues for packagers, but should be run in aiohttp's CI. diff --git a/tests/test_imports.py b/tests/test_imports.py index 7d0869d46c4..7f35f5b8cc2 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -33,6 +33,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: } +@pytest.mark.internal @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", From ec2be0500e2674eea019c0966a7a905e9b3d6608 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 00:00:25 +0100 Subject: [PATCH 135/144] [PR #8299/28d026eb backport][3.9] Create marker for internal tests (#8307) **This is a backport of PR #8299 as merged into master (28d026eb9d0c93389431f8f142a5f7c4448d13f9).** Co-authored-by: Sam Bull --- CHANGES/8299.packaging.rst | 2 ++ setup.cfg | 1 + tests/test_imports.py | 1 + 3 files changed, 4 insertions(+) create mode 100644 CHANGES/8299.packaging.rst diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst new file mode 100644 index 00000000000..05abc8237e2 --- /dev/null +++ b/CHANGES/8299.packaging.rst @@ -0,0 +1,2 @@ +Added an ``internal`` pytest marker for tests which should be skipped +by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. 
diff --git a/setup.cfg b/setup.cfg index c514bab9f94..15d22a2f5f7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -171,3 +171,4 @@ junit_family=xunit2 xfail_strict = true markers = dev_mode: mark test to run in dev mode. + internal: tests which may cause issues for packagers, but should be run in aiohttp's CI. diff --git a/tests/test_imports.py b/tests/test_imports.py index 7d0869d46c4..7f35f5b8cc2 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -33,6 +33,7 @@ def test_web___all__(pytester: pytest.Pytester) -> None: } +@pytest.mark.internal @pytest.mark.skipif( not sys.platform.startswith("linux") or platform.python_implementation() == "PyPy", reason="Timing is more reliable on Linux", From 6392df7b4f6b446441ffcdd7629cba18207e9591 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 10:46:05 +0000 Subject: [PATCH 136/144] Bump typing-extensions from 4.10.0 to 4.11.0 (#8312) Bumps [typing-extensions](https://github.com/python/typing_extensions) from 4.10.0 to 4.11.0.
Release notes

Sourced from typing-extensions's releases.

4.11.0

Release 4.11.0 (April 5, 2024)

This feature release provides improvements to various recently added features, most importantly type parameter defaults (PEP 696).

There are no changes since 4.11.0rc1.

Changes since 4.10.0:

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.

4.11.0rc1

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.
Changelog

Sourced from typing-extensions's changelog.

Release 4.11.0 (April 5, 2024)

This feature release provides improvements to various recently added features, most importantly type parameter defaults (PEP 696).

There are no changes since 4.11.0rc1.

Release 4.11.0rc1 (March 24, 2024)

  • Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
  • Fix the runtime behavior of type parameters with defaults (PEP 696). Patch by Nadir Chowdhury.
  • Fix minor discrepancy between error messages produced by typing and typing_extensions on Python 3.10. Patch by Jelle Zijlstra.
  • When include_extra=False, get_type_hints() now strips ReadOnly from the annotation.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=typing-extensions&package-manager=pip&previous-version=4.10.0&new-version=4.11.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- requirements/typing-extensions.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 977f97dd5da..e7ddfd3085d 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -36,7 +36,7 @@ pycares==4.3.0 # via aiodns pycparser==2.21 # via cffi -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in uvloop==0.19.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index f13a0f83ef3..f7e0b7c4798 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -246,7 +246,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.6.1 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/cython.txt b/requirements/cython.txt index 63bcc1ac3b5..72b9a67af98 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -8,5 +8,5 @@ cython==3.0.10 # via -r requirements/cython.in multidict==6.0.5 # via -r requirements/multidict.in -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 6ea71482ea4..4e11ab6c31b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -232,7 +232,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in 
# aioredis diff --git a/requirements/lint.txt b/requirements/lint.txt index 59da6563db3..fcfac455b3e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -47,7 +47,7 @@ tomli==2.0.1 # mypy # pytest # slotscheck -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # aioredis diff --git a/requirements/test.txt b/requirements/test.txt index 19edd509007..a55d127d077 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -111,7 +111,7 @@ trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in typer==0.9.0 # via python-on-whales -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via # -r requirements/typing-extensions.in # annotated-types diff --git a/requirements/typing-extensions.txt b/requirements/typing-extensions.txt index a7d80f5a0fd..289f5da53cb 100644 --- a/requirements/typing-extensions.txt +++ b/requirements/typing-extensions.txt @@ -4,5 +4,5 @@ # # pip-compile --allow-unsafe --output-file=requirements/typing-extensions.txt --resolver=backtracking --strip-extras requirements/typing-extensions.in # -typing-extensions==4.10.0 +typing-extensions==4.11.0 # via -r requirements/typing-extensions.in From b2a1f984ef3afaa087406ccaba330389f568e6d2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:12:54 +0100 Subject: [PATCH 137/144] [PR #8309/c29945a1 backport][3.10] Improve reliability of run_app test (#8316) **This is a backport of PR #8309 as merged into master (c29945a19ef93ac05f7499bfc410e234270ddbb3).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 1166421a4eb..5696928b219 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -916,10 +916,16 @@ async def stop(self, request: web.Request) -> web.Response: def run_app(self, port: int, timeout: int, task, 
extra_test=None) -> asyncio.Task: async def test() -> None: - await asyncio.sleep(1) + await asyncio.sleep(0.5) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): - pass + for _ in range(5): # pragma: no cover + try: + async with sess.get(f"http://localhost:{port}/"): + pass + except ClientConnectorError: + await asyncio.sleep(0.5) + else: + break async with sess.get(f"http://localhost:{port}/stop"): pass From ed43040613988fc4666109aca82a5180ff165df5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Mon, 8 Apr 2024 19:13:07 +0100 Subject: [PATCH 138/144] [PR #8309/c29945a1 backport][3.9] Improve reliability of run_app test (#8315) **This is a backport of PR #8309 as merged into master (c29945a19ef93ac05f7499bfc410e234270ddbb3).** Co-authored-by: Sam Bull --- tests/test_run_app.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 1166421a4eb..5696928b219 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -916,10 +916,16 @@ async def stop(self, request: web.Request) -> web.Response: def run_app(self, port: int, timeout: int, task, extra_test=None) -> asyncio.Task: async def test() -> None: - await asyncio.sleep(1) + await asyncio.sleep(0.5) async with ClientSession() as sess: - async with sess.get(f"http://localhost:{port}/"): - pass + for _ in range(5): # pragma: no cover + try: + async with sess.get(f"http://localhost:{port}/"): + pass + except ClientConnectorError: + await asyncio.sleep(0.5) + else: + break async with sess.get(f"http://localhost:{port}/stop"): pass From 7e16dd198ea5042bd127a0d0417c40348ac1d158 Mon Sep 17 00:00:00 2001 From: Alexey Nikitin <30608416+NewGlad@users.noreply.github.com> Date: Tue, 9 Apr 2024 21:50:18 +0800 Subject: [PATCH 139/144] Patchback/backports/3.10/5fd29467fb63efdfae1ace280cec36b1f8139567/pr 8290 (#8310) --- CHANGES/8253.bugfix | 1 + 
CONTRIBUTORS.txt | 1 + aiohttp/client.py | 51 ++++++++++++++++++++---------------- tests/test_client_session.py | 10 +++++++ 4 files changed, 40 insertions(+), 23 deletions(-) create mode 100644 CHANGES/8253.bugfix diff --git a/CHANGES/8253.bugfix b/CHANGES/8253.bugfix new file mode 100644 index 00000000000..91b06d9b35d --- /dev/null +++ b/CHANGES/8253.bugfix @@ -0,0 +1 @@ +Fixed "Unclosed client session" when initialization of ClientSession fails -- by :user:`NewGlad`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 6b53b5ad9c9..4442664118f 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -27,6 +27,7 @@ Alexander Shorin Alexander Travov Alexandru Mihai Alexey Firsov +Alexey Nikitin Alexey Popravka Alexey Stepanov Amin Etesamian diff --git a/aiohttp/client.py b/aiohttp/client.py index 8d8d13f25f7..6288fb8f89c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -252,6 +252,10 @@ def __init__( max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", ) -> None: + # We initialise _connector to None immediately, as it's referenced in __del__() + # and could cause issues if an exception occurs during initialisation. 
+ self._connector: Optional[BaseConnector] = None + if loop is None: if connector is not None: loop = connector._loop @@ -266,29 +270,6 @@ def __init__( self._base_url.origin() == self._base_url ), "Only absolute URLs without path part are supported" - if connector is None: - connector = TCPConnector(loop=loop) - - if connector._loop is not loop: - raise RuntimeError("Session and connector has to use same event loop") - - self._loop = loop - - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) - - if cookie_jar is None: - cookie_jar = CookieJar(loop=loop) - self._cookie_jar = cookie_jar - - if cookies is not None: - self._cookie_jar.update_cookies(cookies) - - self._connector = connector - self._connector_owner = connector_owner - self._default_auth = auth - self._version = version - self._json_serialize = json_serialize if timeout is sentinel or timeout is None: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: @@ -324,6 +305,30 @@ def __init__( "conflict, please setup " "timeout.connect" ) + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector has to use same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies is not None: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env diff --git a/tests/test_client_session.py b/tests/test_client_session.py index a0654ed8ccd..416b6bbce5d 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -885,3 +885,13 @@ 
async def test_build_url_returns_expected_url( ) -> None: session = await create_session(base_url) assert session._build_url(url) == expected_url + + +async def test_instantiation_with_invalid_timeout_value(loop): + loop.set_debug(False) + logs = [] + loop.set_exception_handler(lambda loop, ctx: logs.append(ctx)) + with pytest.raises(ValueError, match="timeout parameter cannot be .*"): + ClientSession(timeout=1) + # should not have "Unclosed client session" warning + assert not logs From aa8948fb1bba0b517eefd72a509de2f572d641d4 Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Thu, 11 Apr 2024 15:53:10 +0100 Subject: [PATCH 140/144] Escape filenames and paths in HTML when generating index pages (#8317) (#8318) Co-authored-by: J. Nick Koston (cherry picked from commit ffbc43233209df302863712b511a11bdb6001b0f) --- CHANGES/8317.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 12 ++-- tests/test_web_urldispatcher.py | 124 ++++++++++++++++++++++++++++---- 3 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8317.bugfix.rst diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst new file mode 100644 index 00000000000..b24ef2aeb81 --- /dev/null +++ b/CHANGES/8317.bugfix.rst @@ -0,0 +1 @@ +Escaped filenames in static view -- by :user:`bdraco`. 
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index cb433e6c857..d8f148f7618 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1,7 +1,9 @@ import abc import asyncio import base64 +import functools import hashlib +import html import inspect import keyword import os @@ -90,6 +92,8 @@ _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +html_escape = functools.partial(html.escape, quote=True) + class _InfoDict(TypedDict, total=False): path: str @@ -708,7 +712,7 @@ def _directory_as_html(self, filepath: Path) -> str: assert filepath.is_dir() relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" + index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"

{index_of}

" index_list = [] @@ -716,7 +720,7 @@ def _directory_as_html(self, filepath: Path) -> str: for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path + quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") # if file is a directory, add '/' to the end of the name if _file.is_dir(): @@ -725,9 +729,7 @@ def _directory_as_html(self, filepath: Path) -> str: file_name = _file.name index_list.append( - '
  • {name}
  • '.format( - url=file_url, name=file_name - ) + f'
  • {html_escape(file_name)}
  • ' ) ul = "
      \n{}\n
    ".format("\n".join(index_list)) body = f"\n{h1}\n{ul}\n" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 7e8fe53165d..04f2029ebaf 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,6 +1,7 @@ import asyncio import functools import pathlib +import sys from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -14,31 +15,38 @@ @pytest.mark.parametrize( - "show_index,status,prefix,data", + "show_index,status,prefix,request_path,data", [ - pytest.param(False, 403, "/", None, id="index_forbidden"), + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), pytest.param( True, 200, "/", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", - id="index_root", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", ), pytest.param( True, 200, "/static", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n', id="index_static", ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir", + b"\n\nIndex of /my_dir\n\n\n

    " + b'Index of /my_dir

    \n\n\n", + id="index_subdir", + ), ], ) async def test_access_root_of_static_handler( @@ -47,6 +55,7 @@ async def test_access_root_of_static_handler( show_index: bool, status: int, prefix: str, + request_path: str, data: Optional[bytes], ) -> None: # Tests the operation of static file server. @@ -72,7 +81,94 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. - async with await client.get(prefix) as r: + async with await client.get(request_path) as r: + assert r.status == status + + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data + + +@pytest.mark.internal # Dependent on filesystem +@pytest.mark.skipif( + not sys.platform.startswith("linux"), + reason="Invalid filenames on some filesystems (like Windows)", +) +@pytest.mark.parametrize( + "show_index,status,prefix,request_path,data", + [ + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), + pytest.param( + True, + 200, + "/", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + ), + pytest.param( + True, + 200, + "/static", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + id="index_static", + ), + pytest.param( + True, + 200, + "/static", + "/static/.dir", + b"\n\nIndex of /<img src=0 onerror=alert(1)>.dir</t" + b"itle>\n</head>\n<body>\n<h1>Index of /<img src=0 onerror=alert(1)>.di" + b'r</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.di' + b'r/my_file_in_dir">my_file_in_dir</a></li>\n</ul>\n</body>\n</html>', + id="index_subdir", + ), + ], +) +async def test_access_root_of_static_handler_xss( + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + show_index: bool, + status: int, + prefix: str, + request_path: str, + data: Optional[bytes], +) -> None: + # Tests the operation of static file server. + # Try to access the root of static file server, and make + # sure that correct HTTP statuses are returned depending if we directory + # index should be shown or not. + # Ensure that html in file names is escaped. + # Ensure that links are url quoted. + my_file = tmp_path / "<img src=0 onerror=alert(1)>.txt" + my_dir = tmp_path / "<img src=0 onerror=alert(1)>.dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" + + with my_file.open("w") as fw: + fw.write("hello") + + with my_file_in_dir.open("w") as fw: + fw.write("world") + + app = web.Application() + + # Register global static route: + app.router.add_static(prefix, str(tmp_path), show_index=show_index) + client = await aiohttp_client(app) + + # Request the root of the static directory. + async with await client.get(request_path) as r: assert r.status == status if data: From 28335525d1eac015a7e7584137678cbb6ff19397 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 11 Apr 2024 15:54:45 +0100 Subject: [PATCH 141/144] Escape filenames and paths in HTML when generating index pages (#8317) (#8319) Co-authored-by: J. 
Nick Koston <nick@koston.org> (cherry picked from commit ffbc43233209df302863712b511a11bdb6001b0f) --- CHANGES/8317.bugfix.rst | 1 + aiohttp/web_urldispatcher.py | 12 ++-- tests/test_web_urldispatcher.py | 124 ++++++++++++++++++++++++++++---- 3 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 CHANGES/8317.bugfix.rst diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst new file mode 100644 index 00000000000..b24ef2aeb81 --- /dev/null +++ b/CHANGES/8317.bugfix.rst @@ -0,0 +1 @@ +Escaped filenames in static view -- by :user:`bdraco`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 99696533444..954291f6449 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1,7 +1,9 @@ import abc import asyncio import base64 +import functools import hashlib +import html import inspect import keyword import os @@ -90,6 +92,8 @@ _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +html_escape = functools.partial(html.escape, quote=True) + class _InfoDict(TypedDict, total=False): path: str @@ -708,7 +712,7 @@ def _directory_as_html(self, filepath: Path) -> str: assert filepath.is_dir() relative_path_to_dir = filepath.relative_to(self._directory).as_posix() - index_of = f"Index of /{relative_path_to_dir}" + index_of = f"Index of /{html_escape(relative_path_to_dir)}" h1 = f"<h1>{index_of}</h1>" index_list = [] @@ -716,7 +720,7 @@ def _directory_as_html(self, filepath: Path) -> str: for _file in sorted(dir_index): # show file url as relative to static path rel_path = _file.relative_to(self._directory).as_posix() - file_url = self._prefix + "/" + rel_path + quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}") # if file is a directory, add '/' to the end of the name if _file.is_dir(): @@ -725,9 +729,7 @@ def _directory_as_html(self, filepath: Path) -> str: file_name = _file.name index_list.append( - '<li><a 
href="{url}">{name}</a></li>'.format( - url=file_url, name=file_name - ) + f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>' ) ul = "<ul>\n{}\n</ul>".format("\n".join(index_list)) body = f"<body>\n{h1}\n{ul}\n</body>" diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 76e533e473a..0441890c10b 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -1,6 +1,7 @@ import asyncio import functools import pathlib +import sys from typing import Optional from unittest import mock from unittest.mock import MagicMock @@ -14,31 +15,38 @@ @pytest.mark.parametrize( - "show_index,status,prefix,data", + "show_index,status,prefix,request_path,data", [ - pytest.param(False, 403, "/", None, id="index_forbidden"), + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), pytest.param( True, 200, "/", - b"<html>\n<head>\n<title>Index of /.\n" - b"\n\n

    Index of /.

    \n\n\n", - id="index_root", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", ), pytest.param( True, 200, "/static", - b"\n\nIndex of /.\n" - b"\n\n

    Index of /.

    \n\n\n", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n', id="index_static", ), + pytest.param( + True, + 200, + "/static", + "/static/my_dir", + b"\n\nIndex of /my_dir\n\n\n

    " + b'Index of /my_dir

    \n\n\n", + id="index_subdir", + ), ], ) async def test_access_root_of_static_handler( @@ -47,6 +55,7 @@ async def test_access_root_of_static_handler( show_index: bool, status: int, prefix: str, + request_path: str, data: Optional[bytes], ) -> None: # Tests the operation of static file server. @@ -72,7 +81,94 @@ async def test_access_root_of_static_handler( client = await aiohttp_client(app) # Request the root of the static directory. - async with await client.get(prefix) as r: + async with await client.get(request_path) as r: + assert r.status == status + + if data: + assert r.headers["Content-Type"] == "text/html; charset=utf-8" + read_ = await r.read() + assert read_ == data + + +@pytest.mark.internal # Dependent on filesystem +@pytest.mark.skipif( + not sys.platform.startswith("linux"), + reason="Invalid filenames on some filesystems (like Windows)", +) +@pytest.mark.parametrize( + "show_index,status,prefix,request_path,data", + [ + pytest.param(False, 403, "/", "/", None, id="index_forbidden"), + pytest.param( + True, + 200, + "/", + "/", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + ), + pytest.param( + True, + 200, + "/static", + "/static", + b"\n\nIndex of /.\n\n\n

    Index of" + b' /.

    \n\n\n", + id="index_static", + ), + pytest.param( + True, + 200, + "/static", + "/static/.dir", + b"\n\nIndex of /<img src=0 onerror=alert(1)>.dir</t" + b"itle>\n</head>\n<body>\n<h1>Index of /<img src=0 onerror=alert(1)>.di" + b'r</h1>\n<ul>\n<li><a href="/static/%3Cimg%20src=0%20onerror=alert(1)%3E.di' + b'r/my_file_in_dir">my_file_in_dir</a></li>\n</ul>\n</body>\n</html>', + id="index_subdir", + ), + ], +) +async def test_access_root_of_static_handler_xss( + tmp_path: pathlib.Path, + aiohttp_client: AiohttpClient, + show_index: bool, + status: int, + prefix: str, + request_path: str, + data: Optional[bytes], +) -> None: + # Tests the operation of static file server. + # Try to access the root of static file server, and make + # sure that correct HTTP statuses are returned depending if we directory + # index should be shown or not. + # Ensure that html in file names is escaped. + # Ensure that links are url quoted. + my_file = tmp_path / "<img src=0 onerror=alert(1)>.txt" + my_dir = tmp_path / "<img src=0 onerror=alert(1)>.dir" + my_dir.mkdir() + my_file_in_dir = my_dir / "my_file_in_dir" + + with my_file.open("w") as fw: + fw.write("hello") + + with my_file_in_dir.open("w") as fw: + fw.write("world") + + app = web.Application() + + # Register global static route: + app.router.add_static(prefix, str(tmp_path), show_index=show_index) + client = await aiohttp_client(app) + + # Request the root of the static directory. 
+ async with await client.get(request_path) as r: assert r.status == status if data: From a7e240a9f625a0b9559bdf5f0049c71565352400 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 17:47:50 +0100 Subject: [PATCH 142/144] [PR #8320/9ba9a4e5 backport][3.9] Fix Python parser to mark responses without length as closing (#8321) **This is a backport of PR #8320 as merged into master (9ba9a4e531599b9cb2f8cc80effbde40c7eab0bd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8320.bugfix.rst | 1 + aiohttp/http_parser.py | 11 ++++++++++- tests/test_http_parser.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 CHANGES/8320.bugfix.rst diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst new file mode 100644 index 00000000000..027074f743b --- /dev/null +++ b/CHANGES/8320.bugfix.rst @@ -0,0 +1 @@ +Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. 
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index cce0b788d46..013511917e8 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -718,7 +718,16 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: ) = self.parse_headers(lines) if close is None: - close = version_o <= HttpVersion10 + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True return RawResponseMessage( version_o, diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index d76bb64bab5..ee7dc4aabc5 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -735,7 +735,7 @@ def test_max_header_value_size_continuation_under_limit(response) -> None: assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 + assert msg.should_close assert msg.compression is None assert not msg.upgrade assert not msg.chunked From 68f1e414133066c4f620201a09f71b20cda8bb29 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 11 Apr 2024 18:13:17 +0100 Subject: [PATCH 143/144] [PR #8320/9ba9a4e5 backport][3.10] Fix Python parser to mark responses without length as closing (#8322) **This is a backport of PR #8320 as merged into master (9ba9a4e531599b9cb2f8cc80effbde40c7eab0bd).** Co-authored-by: Sam Bull <git@sambull.org> --- CHANGES/8320.bugfix.rst | 1 + aiohttp/http_parser.py | 11 ++++++++++- tests/test_http_parser.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 
CHANGES/8320.bugfix.rst diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst new file mode 100644 index 00000000000..027074f743b --- /dev/null +++ b/CHANGES/8320.bugfix.rst @@ -0,0 +1 @@ +Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index cce0b788d46..013511917e8 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -718,7 +718,16 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: ) = self.parse_headers(lines) if close is None: - close = version_o <= HttpVersion10 + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True return RawResponseMessage( version_o, diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 04b254c0ae8..32dd0e68b57 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -743,7 +743,7 @@ def test_max_header_value_size_continuation_under_limit(response) -> None: assert msg.version == (1, 1) assert msg.headers == CIMultiDict({"data": "test " + value.decode()}) assert msg.raw_headers == ((b"data", b"test " + value),) - # assert not msg.should_close # TODO: https://github.com/nodejs/llhttp/issues/354 + assert msg.should_close assert msg.compression is None assert not msg.upgrade assert not msg.chunked From b3397c7ac44fc80206d28f1dd0d1f3b10c4ec572 Mon Sep 17 00:00:00 2001 From: Sam Bull <git@sambull.org> Date: Thu, 11 Apr 2024 18:54:11 +0100 Subject: [PATCH 144/144] Release v3.9.4 (#8201) Preparing for tomorrow, let me know if there's any reason to delay. 
@bdraco @webknjaz --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- CHANGES.rst | 213 +++++++++++++++++++++++++++++++++++ CHANGES/7741.bugfix.rst | 3 - CHANGES/8089.bugfix.rst | 3 - CHANGES/8099.contrib.rst | 4 - CHANGES/8104.bugfix.rst | 1 - CHANGES/8116.contrib.rst | 1 - CHANGES/8136.contrib.rst | 7 -- CHANGES/8139.contrib.rst | 1 - CHANGES/8146.feature.rst | 1 - CHANGES/8163.bugfix.rst | 5 - CHANGES/8197.doc | 1 - CHANGES/8200.bugfix.rst | 6 - CHANGES/8251.bugfix.rst | 4 - CHANGES/8252.bugfix.rst | 2 - CHANGES/8267.doc.rst | 1 - CHANGES/8271.bugfix.rst | 1 - CHANGES/8280.bugfix.rst | 1 - CHANGES/8280.deprecation.rst | 2 - CHANGES/8283.bugfix.rst | 2 - CHANGES/8292.feature.rst | 1 - CHANGES/8299.packaging.rst | 2 - CHANGES/8317.bugfix.rst | 1 - CHANGES/8320.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 24 files changed, 214 insertions(+), 52 deletions(-) delete mode 100644 CHANGES/7741.bugfix.rst delete mode 100644 CHANGES/8089.bugfix.rst delete mode 100644 CHANGES/8099.contrib.rst delete mode 100644 CHANGES/8104.bugfix.rst delete mode 100644 CHANGES/8116.contrib.rst delete mode 100644 CHANGES/8136.contrib.rst delete mode 100644 CHANGES/8139.contrib.rst delete mode 100644 CHANGES/8146.feature.rst delete mode 100644 CHANGES/8163.bugfix.rst delete mode 100644 CHANGES/8197.doc delete mode 100644 CHANGES/8200.bugfix.rst delete mode 100644 CHANGES/8251.bugfix.rst delete mode 100644 CHANGES/8252.bugfix.rst delete mode 100644 CHANGES/8267.doc.rst delete mode 100644 CHANGES/8271.bugfix.rst delete mode 100644 CHANGES/8280.bugfix.rst delete mode 100644 CHANGES/8280.deprecation.rst delete mode 100644 CHANGES/8283.bugfix.rst delete mode 100644 CHANGES/8292.feature.rst delete mode 100644 CHANGES/8299.packaging.rst delete mode 100644 CHANGES/8317.bugfix.rst delete mode 100644 CHANGES/8320.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index 64dff9b516d..72f63329af2 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 
+10,219 @@ .. towncrier release notes start + +3.9.4 (2024-04-11) +================== + +Bug fixes +--------- + +- The asynchronous internals now set the underlying causes + when assigning exceptions to the future objects + -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8089`. + + + +- Treated values of ``Accept-Encoding`` header as case-insensitive when checking + for gzip files -- by :user:`steverep`. + + + *Related issues and pull requests on GitHub:* + :issue:`8104`. + + + +- Improved the DNS resolution performance on cache hit -- by :user:`bdraco`. + + This is achieved by avoiding an :mod:`asyncio` task creation in this case. + + + *Related issues and pull requests on GitHub:* + :issue:`8163`. + + +- Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, + :meth:`aiohttp.MultipartWriter.append_json` and + :meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` + + + *Related issues and pull requests on GitHub:* + :issue:`7741`. + + + +- Ensure websocket transport is closed when client does not close it + -- by :user:`bdraco`. + + The transport could remain open if the client did not close it. This + change ensures the transport is closed when the client does not close + it. + + + *Related issues and pull requests on GitHub:* + :issue:`8200`. + + + +- Leave websocket transport open if receive times out or is cancelled + -- by :user:`bdraco`. + + This restores the behavior prior to the change in #7978. + + + *Related issues and pull requests on GitHub:* + :issue:`8251`. + + + +- Fixed content not being read when an upgrade request was not supported with the pure Python implementation. + -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8252`. + + + +- Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8271`. 
+ + + +- Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8280`. + + + +- Fixed blocking I/O in the event loop while processing files in a POST request + -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8283`. + + + +- Escaped filenames in static view -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`8317`. + + + +- Fixed the pure python parser to mark a connection as closing when a + response has no length -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8320`. + + + + +Features +-------- + +- Upgraded *llhttp* to 9.2.1, and started rejecting obsolete line folding + in Python parser to match -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8146`, :issue:`8292`. + + + + +Deprecations (removal in next major release) +-------------------------------------------- + +- Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() + <aiohttp.FormData.add_field>` -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8280`. + + + + +Improved documentation +---------------------- + +- Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8267`. + + + + +Contributor-facing changes +-------------------------- + +- The pull request template is now asking the contributors to + answer a question about the long-term maintenance challenges + they envision as a result of merging their patches + -- by :user:`webknjaz`. + + + *Related issues and pull requests on GitHub:* + :issue:`8099`. + + + +- Updated CI and documentation to use NPM clean install and upgrade + node to version 18 -- by :user:`steverep`. 
+ + + *Related issues and pull requests on GitHub:* + :issue:`8116`. + + + +- A pytest fixture ``hello_txt`` was introduced to aid + static file serving tests in + :file:`test_web_sendfile_functional.py`. It dynamically + provisions ``hello.txt`` file variants shared across the + tests in the module. + + -- by :user:`steverep` + + + *Related issues and pull requests on GitHub:* + :issue:`8136`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Added an ``internal`` pytest marker for tests which should be skipped + by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`8299`. + + + + +---- + + 3.9.3 (2024-01-29) ================== diff --git a/CHANGES/7741.bugfix.rst b/CHANGES/7741.bugfix.rst deleted file mode 100644 index 9134e920c14..00000000000 --- a/CHANGES/7741.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Changed the type annotations to allow ``dict`` on :meth:`aiohttp.MultipartWriter.append`, -:meth:`aiohttp.MultipartWriter.append_json` and -:meth:`aiohttp.MultipartWriter.append_form` -- by :user:`cakemanny` diff --git a/CHANGES/8089.bugfix.rst b/CHANGES/8089.bugfix.rst deleted file mode 100644 index 7f47448478d..00000000000 --- a/CHANGES/8089.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -The asynchronous internals now set the underlying causes -when assigning exceptions to the future objects --- by :user:`webknjaz`. diff --git a/CHANGES/8099.contrib.rst b/CHANGES/8099.contrib.rst deleted file mode 100644 index 827ecfa5827..00000000000 --- a/CHANGES/8099.contrib.rst +++ /dev/null @@ -1,4 +0,0 @@ -The pull request template is now asking the contributors to -answer a question about the long-term maintenance challenges -they envision as a result of merging their patches --- by :user:`webknjaz`. 
diff --git a/CHANGES/8104.bugfix.rst b/CHANGES/8104.bugfix.rst deleted file mode 100644 index 1ebe6f06d9d..00000000000 --- a/CHANGES/8104.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Treated values of ``Accept-Encoding`` header as case-insensitive when checking for gzip files -- by :user:`steverep`. diff --git a/CHANGES/8116.contrib.rst b/CHANGES/8116.contrib.rst deleted file mode 100644 index c1c7f2ca3bf..00000000000 --- a/CHANGES/8116.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Updated CI and documentation to use NPM clean install and upgrade node to version 18 -- by :user:`steverep`. diff --git a/CHANGES/8136.contrib.rst b/CHANGES/8136.contrib.rst deleted file mode 100644 index 69718a4e0ab..00000000000 --- a/CHANGES/8136.contrib.rst +++ /dev/null @@ -1,7 +0,0 @@ -A pytest fixture ``hello_txt`` was introduced to aid -static file serving tests in -:file:`test_web_sendfile_functional.py`. It dynamically -provisions ``hello.txt`` file variants shared across the -tests in the module. - --- by :user:`steverep` diff --git a/CHANGES/8139.contrib.rst b/CHANGES/8139.contrib.rst deleted file mode 100644 index fd743e70f4a..00000000000 --- a/CHANGES/8139.contrib.rst +++ /dev/null @@ -1 +0,0 @@ -Two definitions for "test_invalid_route_name" existed, only one was being run. Refactored them into a single parameterized test. Enabled lint rule to prevent regression. -- by :user:`alexmac`. diff --git a/CHANGES/8146.feature.rst b/CHANGES/8146.feature.rst deleted file mode 100644 index 9b0cc54206e..00000000000 --- a/CHANGES/8146.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Upgraded *llhttp* to 9.2 -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8163.bugfix.rst b/CHANGES/8163.bugfix.rst deleted file mode 100644 index 8bfb10260c6..00000000000 --- a/CHANGES/8163.bugfix.rst +++ /dev/null @@ -1,5 +0,0 @@ -Improved the DNS resolution performance on cache hit --- by :user:`bdraco`. - -This is achieved by avoiding an :mod:`asyncio` task creation -in this case. 
diff --git a/CHANGES/8197.doc b/CHANGES/8197.doc deleted file mode 100644 index ba4117768e8..00000000000 --- a/CHANGES/8197.doc +++ /dev/null @@ -1 +0,0 @@ -Fixed false behavior of base_url param for ClientSession in client documentation -- by :user:`alexis974`. diff --git a/CHANGES/8200.bugfix.rst b/CHANGES/8200.bugfix.rst deleted file mode 100644 index e4492a8a84c..00000000000 --- a/CHANGES/8200.bugfix.rst +++ /dev/null @@ -1,6 +0,0 @@ -Ensure websocket transport is closed when client does not close it --- by :user:`bdraco`. - -The transport could remain open if the client did not close it. This -change ensures the transport is closed when the client does not close -it. diff --git a/CHANGES/8251.bugfix.rst b/CHANGES/8251.bugfix.rst deleted file mode 100644 index 6fc6507cfe2..00000000000 --- a/CHANGES/8251.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -Leave websocket transport open if receive times out or is cancelled --- by :user:`bdraco`. - -This restores the behavior prior to the change in #7978. diff --git a/CHANGES/8252.bugfix.rst b/CHANGES/8252.bugfix.rst deleted file mode 100644 index e932eb9c7ed..00000000000 --- a/CHANGES/8252.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed content not being read when an upgrade request was not supported with the pure Python implementation. --- by :user:`bdraco`. diff --git a/CHANGES/8267.doc.rst b/CHANGES/8267.doc.rst deleted file mode 100644 index 69f11d37560..00000000000 --- a/CHANGES/8267.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Added a note about canceling tasks to avoid delaying server shutdown -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8271.bugfix.rst b/CHANGES/8271.bugfix.rst deleted file mode 100644 index 9d572ba2fe6..00000000000 --- a/CHANGES/8271.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a race condition with incoming connections during server shutdown -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8280.bugfix.rst b/CHANGES/8280.bugfix.rst deleted file mode 100644 index 3aebe36fe9e..00000000000 --- a/CHANGES/8280.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed ``multipart/form-data`` compliance with :rfc:`7578` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8280.deprecation.rst b/CHANGES/8280.deprecation.rst deleted file mode 100644 index 302dbb2fe2a..00000000000 --- a/CHANGES/8280.deprecation.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecated ``content_transfer_encoding`` parameter in :py:meth:`FormData.add_field() -<aiohttp.FormData.add_field>` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8283.bugfix.rst b/CHANGES/8283.bugfix.rst deleted file mode 100644 index d456d59ba8e..00000000000 --- a/CHANGES/8283.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed blocking I/O in the event loop while processing files in a POST request --- by :user:`bdraco`. diff --git a/CHANGES/8292.feature.rst b/CHANGES/8292.feature.rst deleted file mode 100644 index 6ca82503143..00000000000 --- a/CHANGES/8292.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Upgraded to LLHTTP 9.2.1, and started rejecting obsolete line folding in Python parser to match -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8299.packaging.rst b/CHANGES/8299.packaging.rst deleted file mode 100644 index 05abc8237e2..00000000000 --- a/CHANGES/8299.packaging.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added an ``internal`` pytest marker for tests which should be skipped -by packagers (use ``-m 'not internal'`` to disable them) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8317.bugfix.rst b/CHANGES/8317.bugfix.rst deleted file mode 100644 index b24ef2aeb81..00000000000 --- a/CHANGES/8317.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Escaped filenames in static view -- by :user:`bdraco`. 
diff --git a/CHANGES/8320.bugfix.rst b/CHANGES/8320.bugfix.rst deleted file mode 100644 index 027074f743b..00000000000 --- a/CHANGES/8320.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the pure python parser to mark a connection as closing when a response has no length -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 6f6ab8e6b99..46db236d00a 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.9.3.dev0" +__version__ = "3.9.4" from typing import TYPE_CHECKING, Tuple