From 127505be50c29d1f19bafe55f9b5a5b3df468baa Mon Sep 17 00:00:00 2001
From: 4shen0ne <33086594+zrquan@users.noreply.github.com>
Date: Thu, 10 Oct 2024 14:58:44 +0800
Subject: [PATCH] Use new typing style (#963)

* Use new typing style

* Pass all checks
---
 httpcore/_api.py                   | 20 ++++++-----
 httpcore/_async/connection.py      | 26 +++++++-------
 httpcore/_async/connection_pool.py | 46 ++++++++++++------------
 httpcore/_async/http11.py          | 45 ++++++++++------------
 httpcore/_async/http2.py           | 58 ++++++++++++++----------------
 httpcore/_async/http_proxy.py      | 56 +++++++++++++++--------------
 httpcore/_async/interfaces.py      | 20 ++++++-----
 httpcore/_async/socks_proxy.py     | 33 +++++++++--------
 httpcore/_backends/anyio.py        | 28 +++++++--------
 httpcore/_backends/auto.py         | 15 ++++----
 httpcore/_backends/base.py         | 46 ++++++++++++------------
 httpcore/_backends/mock.py         | 53 +++++++++++++--------------
 httpcore/_backends/sync.py         | 34 +++++++++---------
 httpcore/_backends/trio.py         | 28 +++++++--------
 httpcore/_models.py                | 52 +++++++++++++--------------
 httpcore/_sync/connection.py       | 26 +++++++-------
 httpcore/_sync/connection_pool.py  | 46 ++++++++++++------------
 httpcore/_sync/http11.py           | 45 ++++++++++------------
 httpcore/_sync/http2.py            | 58 ++++++++++++++----------------
 httpcore/_sync/http_proxy.py       | 56 +++++++++++++++--------------
 httpcore/_sync/interfaces.py       | 20 ++++++-----
 httpcore/_sync/socks_proxy.py      | 33 +++++++++--------
 httpcore/_synchronization.py       | 55 ++++++++++++++--------------
 httpcore/_trace.py                 | 28 ++++++++-------
 httpcore/_utils.py                 |  5 +--
 25 files changed, 458 insertions(+), 474 deletions(-)

diff --git a/httpcore/_api.py b/httpcore/_api.py
index 854235f5..e84a88fb 100644
--- a/httpcore/_api.py
+++ b/httpcore/_api.py
@@ -1,17 +1,19 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
-from typing import Iterator, Optional, Union
+from typing import Iterator
 
 from ._models import URL, Extensions, HeaderTypes, Response
 from ._sync.connection_pool import ConnectionPool
 
 
 def request(
-    method: Union[bytes, str],
-    url: Union[URL, bytes, str],
+    method: bytes | str,
+    url: URL | bytes | str,
     *,
     headers: HeaderTypes = None,
-    content: Union[bytes, Iterator[bytes], None] = None,
-    extensions: Optional[Extensions] = None,
+    content: bytes | Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
 ) -> Response:
     """
     Sends an HTTP request, returning the response.
@@ -47,12 +49,12 @@ def request(
 
 @contextmanager
 def stream(
-    method: Union[bytes, str],
-    url: Union[URL, bytes, str],
+    method: bytes | str,
+    url: URL | bytes | str,
     *,
     headers: HeaderTypes = None,
-    content: Union[bytes, Iterator[bytes], None] = None,
-    extensions: Optional[Extensions] = None,
+    content: bytes | Iterator[bytes] | None = None,
+    extensions: Extensions | None = None,
 ) -> Iterator[Response]:
     """
     Sends an HTTP request, returning the response within a content manager.
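For context, here is a minimal sketch of the before/after typing style this patch adopts throughout the codebase (the class and method names below are illustrative, not taken from httpcore). With `from __future__ import annotations`, annotations are stored as strings rather than evaluated, so PEP 604 unions (`float | None`), PEP 585 builtin generics (`list[tuple[bytes, bytes]]`), and unquoted forward references are all valid inside annotations on every Python version the library supports.

from __future__ import annotations  # PEP 563: annotations are no longer evaluated

from typing import Iterable


class Connection:  # illustrative class, not part of httpcore
    # Old style:
    #   def open(self, timeout: Optional[float] = None,
    #            options: Optional[Iterable[int]] = None) -> "Connection": ...
    #   def headers(self) -> List[Tuple[bytes, bytes]]: ...
    def open(
        self,
        timeout: float | None = None,            # PEP 604 union syntax
        options: Iterable[int] | None = None,
    ) -> Connection:                              # unquoted forward reference
        return self

    def headers(self) -> list[tuple[bytes, bytes]]:  # PEP 585 builtin generics
        return [(b"host", b"example.com")]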
diff --git a/httpcore/_async/connection.py b/httpcore/_async/connection.py index 2f439cf0..2eb53c8e 100644 --- a/httpcore/_async/connection.py +++ b/httpcore/_async/connection.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import itertools import logging import ssl from types import TracebackType -from typing import Iterable, Iterator, Optional, Type +from typing import Iterable, Iterator from .._backends.auto import AutoBackend from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream @@ -37,15 +39,15 @@ class AsyncHTTPConnection(AsyncConnectionInterface): def __init__( self, origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: self._origin = origin self._ssl_context = ssl_context @@ -59,7 +61,7 @@ def __init__( self._network_backend: AsyncNetworkBackend = ( AutoBackend() if network_backend is None else network_backend ) - self._connection: Optional[AsyncConnectionInterface] = None + self._connection: AsyncConnectionInterface | None = None self._connect_failed: bool = False self._request_lock = AsyncLock() self._socket_options = socket_options @@ -208,13 +210,13 @@ def __repr__(self) -> str: # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. 
- async def __aenter__(self) -> "AsyncHTTPConnection": + async def __aenter__(self) -> AsyncHTTPConnection: return self async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: await self.aclose() diff --git a/httpcore/_async/connection_pool.py b/httpcore/_async/connection_pool.py index 214dfc4b..314eb2c7 100644 --- a/httpcore/_async/connection_pool.py +++ b/httpcore/_async/connection_pool.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import ssl import sys from types import TracebackType -from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type +from typing import AsyncIterable, AsyncIterator, Iterable from .._backends.auto import AutoBackend from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend @@ -15,12 +17,10 @@ class AsyncPoolRequest: def __init__(self, request: Request) -> None: self.request = request - self.connection: Optional[AsyncConnectionInterface] = None + self.connection: AsyncConnectionInterface | None = None self._connection_acquired = AsyncEvent() - def assign_to_connection( - self, connection: Optional[AsyncConnectionInterface] - ) -> None: + def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: self.connection = connection self._connection_acquired.set() @@ -29,7 +29,7 @@ def clear_connection(self) -> None: self._connection_acquired = AsyncEvent() async def wait_for_connection( - self, timeout: Optional[float] = None + self, timeout: float | None = None ) -> AsyncConnectionInterface: if self.connection is None: await self._connection_acquired.wait(timeout=timeout) @@ -47,17 +47,17 @@ class AsyncConnectionPool(AsyncRequestInterface): def __init__( self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: """ A connection pool for making HTTP requests. @@ -116,8 +116,8 @@ def __init__( # The mutable state on a connection pool is the queue of incoming requests, # and the set of connections that are servicing those requests. - self._connections: List[AsyncConnectionInterface] = [] - self._requests: List[AsyncPoolRequest] = [] + self._connections: list[AsyncConnectionInterface] = [] + self._requests: list[AsyncPoolRequest] = [] # We only mutate the state of the connection pool within an 'optional_thread_lock' # context. 
This holds a threading lock unless we're running in async mode, @@ -139,7 +139,7 @@ def create_connection(self, origin: Origin) -> AsyncConnectionInterface: ) @property - def connections(self) -> List[AsyncConnectionInterface]: + def connections(self) -> list[AsyncConnectionInterface]: """ Return a list of the connections currently in the pool. @@ -227,7 +227,7 @@ async def handle_async_request(self, request: Request) -> Response: extensions=response.extensions, ) - def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]: + def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]: """ Manage the state of the connection pool, assigning incoming requests to connections as available. @@ -298,7 +298,7 @@ def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]: return closing_connections - async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None: + async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: # Close connections which have been removed from the pool. with AsyncShieldCancellation(): for connection in closing: @@ -312,14 +312,14 @@ async def aclose(self) -> None: self._connections = [] await self._close_connections(closing_connections) - async def __aenter__(self) -> "AsyncConnectionPool": + async def __aenter__(self) -> AsyncConnectionPool: return self async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: await self.aclose() diff --git a/httpcore/_async/http11.py b/httpcore/_async/http11.py index 0493a923..b5c3cd5d 100644 --- a/httpcore/_async/http11.py +++ b/httpcore/_async/http11.py @@ -1,18 +1,11 @@ +from __future__ import annotations + import enum import logging import ssl import time from types import TracebackType -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - List, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, AsyncIterable, AsyncIterator, Union import h11 @@ -55,12 +48,12 @@ def __init__( self, origin: Origin, stream: AsyncNetworkStream, - keepalive_expiry: Optional[float] = None, + keepalive_expiry: float | None = None, ) -> None: self._origin = origin self._network_stream = stream - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._expire_at: Optional[float] = None + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None self._state = HTTPConnectionState.NEW self._state_lock = AsyncLock() self._request_count = 0 @@ -167,9 +160,7 @@ async def _send_request_body(self, request: Request) -> None: await self._send_event(h11.EndOfMessage(), timeout=timeout) - async def _send_event( - self, event: h11.Event, timeout: Optional[float] = None - ) -> None: + async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: bytes_to_send = self._h11_state.send(event) if bytes_to_send is not None: await self._network_stream.write(bytes_to_send, timeout=timeout) @@ -178,7 +169,7 @@ async def _send_event( async def _receive_response_headers( self, request: Request - ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) @@ -214,8 +205,8 @@ async def 
_receive_response_body(self, request: Request) -> AsyncIterator[bytes] break async def _receive_event( - self, timeout: Optional[float] = None - ) -> Union[h11.Event, Type[h11.PAUSED]]: + self, timeout: float | None = None + ) -> h11.Event | type[h11.PAUSED]: while True: with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): event = self._h11_state.next_event() @@ -316,14 +307,14 @@ def __repr__(self) -> str: # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. - async def __aenter__(self) -> "AsyncHTTP11Connection": + async def __aenter__(self) -> AsyncHTTP11Connection: return self async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: await self.aclose() @@ -360,7 +351,7 @@ def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: self._stream = stream self._leading_data = leading_data - async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: if self._leading_data: buffer = self._leading_data[:max_bytes] self._leading_data = self._leading_data[max_bytes:] @@ -368,7 +359,7 @@ async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: else: return await self._stream.read(max_bytes, timeout) - async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + async def write(self, buffer: bytes, timeout: float | None = None) -> None: await self._stream.write(buffer, timeout) async def aclose(self) -> None: @@ -377,8 +368,8 @@ async def aclose(self) -> None: async def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> AsyncNetworkStream: return await self._stream.start_tls(ssl_context, server_hostname, timeout) diff --git a/httpcore/_async/http2.py b/httpcore/_async/http2.py index c201ee4c..c10e1bb9 100644 --- a/httpcore/_async/http2.py +++ b/httpcore/_async/http2.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import enum import logging import time import types -import typing +from typing import AsyncIterable, AsyncIterator import h2.config import h2.connection @@ -45,14 +47,14 @@ def __init__( self, origin: Origin, stream: AsyncNetworkStream, - keepalive_expiry: typing.Optional[float] = None, + keepalive_expiry: float | None = None, ): self._origin = origin self._network_stream = stream - self._keepalive_expiry: typing.Optional[float] = keepalive_expiry + self._keepalive_expiry: float | None = keepalive_expiry self._h2_state = h2.connection.H2Connection(config=self.CONFIG) self._state = HTTPConnectionState.IDLE - self._expire_at: typing.Optional[float] = None + self._expire_at: float | None = None self._request_count = 0 self._init_lock = AsyncLock() self._state_lock = AsyncLock() @@ -63,24 +65,20 @@ def __init__( self._connection_error = False # Mapping from stream ID to response stream events. 
- self._events: typing.Dict[ + self._events: dict[ int, - typing.Union[ - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ], + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, ] = {} # Connection terminated events are stored as state since # we need to handle them for all streams. - self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( - None - ) + self._connection_terminated: h2.events.ConnectionTerminated | None = None - self._read_exception: typing.Optional[Exception] = None - self._write_exception: typing.Optional[Exception] = None + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None async def handle_async_request(self, request: Request) -> Response: if not self.can_handle_request(request.url.origin): @@ -255,7 +253,7 @@ async def _send_request_body(self, request: Request, stream_id: int) -> None: if not has_body_headers(request): return - assert isinstance(request.stream, typing.AsyncIterable) + assert isinstance(request.stream, AsyncIterable) async for data in request.stream: await self._send_stream_data(request, stream_id, data) await self._send_end_stream(request, stream_id) @@ -284,7 +282,7 @@ async def _send_end_stream(self, request: Request, stream_id: int) -> None: async def _receive_response( self, request: Request, stream_id: int - ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: + ) -> tuple[int, list[tuple[bytes, bytes]]]: """ Return the response status code and headers for a given stream ID. """ @@ -305,7 +303,7 @@ async def _receive_response( async def _receive_response_body( self, request: Request, stream_id: int - ) -> typing.AsyncIterator[bytes]: + ) -> AsyncIterator[bytes]: """ Iterator that returns the bytes of the response body for a given stream ID. """ @@ -321,9 +319,7 @@ async def _receive_response_body( async def _receive_stream_event( self, request: Request, stream_id: int - ) -> typing.Union[ - h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded - ]: + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: """ Return the next available event for a given stream ID. @@ -337,7 +333,7 @@ async def _receive_stream_event( return event async def _receive_events( - self, request: Request, stream_id: typing.Optional[int] = None + self, request: Request, stream_id: int | None = None ) -> None: """ Read some data from the network until we see one or more events @@ -425,9 +421,7 @@ async def aclose(self) -> None: # Wrappers around network read/write operations... - async def _read_incoming_data( - self, request: Request - ) -> typing.List[h2.events.Event]: + async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) @@ -451,7 +445,7 @@ async def _read_incoming_data( self._connection_error = True raise exc - events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) + events: list[h2.events.Event] = self._h2_state.receive_data(data) return events @@ -544,14 +538,14 @@ def __repr__(self) -> str: # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. 
- async def __aenter__(self) -> "AsyncHTTP2Connection": + async def __aenter__(self) -> AsyncHTTP2Connection: return self async def __aexit__( self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[types.TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, ) -> None: await self.aclose() @@ -565,7 +559,7 @@ def __init__( self._stream_id = stream_id self._closed = False - async def __aiter__(self) -> typing.AsyncIterator[bytes]: + async def __aiter__(self) -> AsyncIterator[bytes]: kwargs = {"request": self._request, "stream_id": self._stream_id} try: async with Trace("receive_response_body", logger, self._request, kwargs): diff --git a/httpcore/_async/http_proxy.py b/httpcore/_async/http_proxy.py index 4aa7d874..896d28cb 100644 --- a/httpcore/_async/http_proxy.py +++ b/httpcore/_async/http_proxy.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import logging import ssl from base64 import b64encode -from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union +from typing import Iterable, Mapping, Sequence, Tuple, Union from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend from .._exceptions import ProxyError @@ -30,9 +32,9 @@ def merge_headers( - default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, -) -> List[Tuple[bytes, bytes]]: + default_headers: Sequence[tuple[bytes, bytes]] | None = None, + override_headers: Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: """ Append default_headers and override_headers, de-duplicating if a key exists in both cases. @@ -60,21 +62,21 @@ class AsyncHTTPProxy(AsyncConnectionPool): def __init__( self, - proxy_url: Union[URL, bytes, str], - proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: """ A connection pool for making HTTP requests. 
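The `merge_headers` helper shown in the hunk above is a plain module-level function, so a quick usage sketch may help; the header values here are made up for illustration, and the expected result follows its docstring ("append ... de-duplicating if a key exists in both cases") rather than any claim about the exact implementation.

from httpcore._async.http_proxy import merge_headers

merged = merge_headers(
    default_headers=[(b"Accept", b"*/*"), (b"User-Agent", b"python-httpcore")],
    override_headers=[(b"User-Agent", b"my-client/1.0")],
)
# Expected, per the docstring: the default Accept header is kept, and the
# overriding User-Agent replaces the default one:
# [(b"Accept", b"*/*"), (b"User-Agent", b"my-client/1.0")]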
@@ -172,11 +174,11 @@ def __init__( self, proxy_origin: Origin, remote_origin: Origin, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - keepalive_expiry: Optional[float] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, ) -> None: self._connection = AsyncHTTPConnection( origin=proxy_origin, @@ -236,14 +238,14 @@ def __init__( self, proxy_origin: Origin, remote_origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - keepalive_expiry: Optional[float] = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: self._connection: AsyncConnectionInterface = AsyncHTTPConnection( origin=proxy_origin, diff --git a/httpcore/_async/interfaces.py b/httpcore/_async/interfaces.py index c998dd27..bff92014 100644 --- a/httpcore/_async/interfaces.py +++ b/httpcore/_async/interfaces.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from contextlib import asynccontextmanager -from typing import AsyncIterator, Optional, Union +from typing import AsyncIterator from .._models import ( URL, @@ -18,12 +20,12 @@ class AsyncRequestInterface: async def request( self, - method: Union[bytes, str], - url: Union[URL, bytes, str], + method: bytes | str, + url: URL | bytes | str, *, headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, + content: bytes | AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, ) -> Response: # Strict type checking on our parameters. method = enforce_bytes(method, name="method") @@ -50,12 +52,12 @@ async def request( @asynccontextmanager async def stream( self, - method: Union[bytes, str], - url: Union[URL, bytes, str], + method: bytes | str, + url: URL | bytes | str, *, headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, + content: bytes | AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, ) -> AsyncIterator[Response]: # Strict type checking on our parameters. 
method = enforce_bytes(method, name="method") diff --git a/httpcore/_async/socks_proxy.py b/httpcore/_async/socks_proxy.py index f839603f..5052f4a8 100644 --- a/httpcore/_async/socks_proxy.py +++ b/httpcore/_async/socks_proxy.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import logging import ssl -import typing from socksio import socks5 @@ -43,7 +44,7 @@ async def _init_socks5_connection( *, host: bytes, port: int, - auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, + auth: tuple[bytes, bytes] | None = None, ) -> None: conn = socks5.SOCKS5Connection() @@ -108,18 +109,16 @@ class AsyncSOCKSProxy(AsyncConnectionPool): def __init__( self, - proxy_url: typing.Union[URL, bytes, str], - proxy_auth: typing.Optional[ - typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] - ] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - max_connections: typing.Optional[int] = 10, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = None, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - network_backend: typing.Optional[AsyncNetworkBackend] = None, + network_backend: AsyncNetworkBackend | None = None, ) -> None: """ A connection pool for making HTTP requests. @@ -167,7 +166,7 @@ def __init__( username, password = proxy_auth username_bytes = enforce_bytes(username, name="proxy_auth") password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( + self._proxy_auth: tuple[bytes, bytes] | None = ( username_bytes, password_bytes, ) @@ -192,12 +191,12 @@ def __init__( self, proxy_origin: Origin, remote_origin: Origin, - proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - keepalive_expiry: typing.Optional[float] = None, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, - network_backend: typing.Optional[AsyncNetworkBackend] = None, + network_backend: AsyncNetworkBackend | None = None, ) -> None: self._proxy_origin = proxy_origin self._remote_origin = remote_origin @@ -211,7 +210,7 @@ def __init__( AutoBackend() if network_backend is None else network_backend ) self._connect_lock = AsyncLock() - self._connection: typing.Optional[AsyncConnectionInterface] = None + self._connection: AsyncConnectionInterface | None = None self._connect_failed = False async def handle_async_request(self, request: Request) -> Response: diff --git a/httpcore/_backends/anyio.py b/httpcore/_backends/anyio.py index d469e008..451ffb37 100644 --- a/httpcore/_backends/anyio.py +++ b/httpcore/_backends/anyio.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import ssl -import typing +from typing import Any, Iterable import anyio @@ -20,9 +22,7 @@ class AnyIOStream(AsyncNetworkStream): def __init__(self, stream: anyio.abc.ByteStream) -> None: self._stream = stream - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: exc_map = { TimeoutError: ReadTimeout, anyio.BrokenResourceError: ReadError, @@ -36,9 
+36,7 @@ async def read( except anyio.EndOfStream: # pragma: nocover return b"" - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: + async def write(self, buffer: bytes, timeout: float | None = None) -> None: if not buffer: return @@ -57,8 +55,8 @@ async def aclose(self) -> None: async def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> AsyncNetworkStream: exc_map = { TimeoutError: ConnectTimeout, @@ -81,7 +79,7 @@ async def start_tls( raise exc return AnyIOStream(ssl_stream) - def get_extra_info(self, info: str) -> typing.Any: + def get_extra_info(self, info: str) -> Any: if info == "ssl_object": return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) if info == "client_addr": @@ -101,9 +99,9 @@ async def connect_tcp( self, host: str, port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: # pragma: nocover if socket_options is None: socket_options = [] @@ -127,8 +125,8 @@ async def connect_tcp( async def connect_unix_socket( self, path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: # pragma: nocover if socket_options is None: socket_options = [] diff --git a/httpcore/_backends/auto.py b/httpcore/_backends/auto.py index 3ac05f4d..e8086acb 100644 --- a/httpcore/_backends/auto.py +++ b/httpcore/_backends/auto.py @@ -1,5 +1,6 @@ -import typing -from typing import Optional +from __future__ import annotations + +from typing import Iterable from .._synchronization import current_async_library from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream @@ -22,9 +23,9 @@ async def connect_tcp( self, host: str, port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: await self._init_backend() return await self._backend.connect_tcp( @@ -38,8 +39,8 @@ async def connect_tcp( async def connect_unix_socket( self, path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: # pragma: nocover await self._init_backend() return await self._backend.connect_unix_socket( diff --git a/httpcore/_backends/base.py b/httpcore/_backends/base.py index 6cadedb5..cf55c8b1 100644 --- a/httpcore/_backends/base.py +++ b/httpcore/_backends/base.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ssl import time import typing @@ -10,10 +12,10 @@ class NetworkStream: - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: raise NotImplementedError() # pragma: nocover - def write(self, buffer: bytes, timeout: 
typing.Optional[float] = None) -> None: + def write(self, buffer: bytes, timeout: float | None = None) -> None: raise NotImplementedError() # pragma: nocover def close(self) -> None: @@ -22,9 +24,9 @@ def close(self) -> None: def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "NetworkStream": + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: raise NotImplementedError() # pragma: nocover def get_extra_info(self, info: str) -> typing.Any: @@ -36,17 +38,17 @@ def connect_tcp( self, host: str, port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: raise NotImplementedError() # pragma: nocover def connect_unix_socket( self, path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: raise NotImplementedError() # pragma: nocover @@ -55,14 +57,10 @@ def sleep(self, seconds: float) -> None: class AsyncNetworkStream: - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: raise NotImplementedError() # pragma: nocover - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: + async def write(self, buffer: bytes, timeout: float | None = None) -> None: raise NotImplementedError() # pragma: nocover async def aclose(self) -> None: @@ -71,9 +69,9 @@ async def aclose(self) -> None: async def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "AsyncNetworkStream": + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: raise NotImplementedError() # pragma: nocover def get_extra_info(self, info: str) -> typing.Any: @@ -85,17 +83,17 @@ async def connect_tcp( self, host: str, port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: raise NotImplementedError() # pragma: nocover async def connect_unix_socket( self, path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: raise NotImplementedError() # pragma: nocover diff --git a/httpcore/_backends/mock.py b/httpcore/_backends/mock.py index f7aefebf..be33ec86 100644 --- a/httpcore/_backends/mock.py +++ b/httpcore/_backends/mock.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import ssl -import typing -from typing import Optional +from typing import Any, Iterable from .._exceptions import ReadError from .base import ( @@ -21,19 +22,19 @@ def selected_alpn_protocol(self) -> str: class MockStream(NetworkStream): - def __init__(self, 
buffer: typing.List[bytes], http2: bool = False) -> None: + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 self._closed = False - def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: if self._closed: raise ReadError("Connection closed") if not self._buffer: return b"" return self._buffer.pop(0) - def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + def write(self, buffer: bytes, timeout: float | None = None) -> None: pass def close(self) -> None: @@ -42,12 +43,12 @@ def close(self) -> None: def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> NetworkStream: return self - def get_extra_info(self, info: str) -> typing.Any: + def get_extra_info(self, info: str) -> Any: return MockSSLObject(http2=self._http2) if info == "ssl_object" else None def __repr__(self) -> str: @@ -55,7 +56,7 @@ def __repr__(self) -> str: class MockBackend(NetworkBackend): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 @@ -63,17 +64,17 @@ def connect_tcp( self, host: str, port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: return MockStream(list(self._buffer), http2=self._http2) def connect_unix_socket( self, path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: return MockStream(list(self._buffer), http2=self._http2) @@ -82,19 +83,19 @@ def sleep(self, seconds: float) -> None: class AsyncMockStream(AsyncNetworkStream): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 self._closed = False - async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: if self._closed: raise ReadError("Connection closed") if not self._buffer: return b"" return self._buffer.pop(0) - async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: + async def write(self, buffer: bytes, timeout: float | None = None) -> None: pass async def aclose(self) -> None: @@ -103,12 +104,12 @@ async def aclose(self) -> None: async def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> AsyncNetworkStream: return self - def get_extra_info(self, info: str) -> typing.Any: + def get_extra_info(self, info: str) -> Any: return MockSSLObject(http2=self._http2) if info == "ssl_object" else None def __repr__(self) -> str: @@ -116,7 +117,7 @@ def __repr__(self) -> str: class AsyncMockBackend(AsyncNetworkBackend): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) 
-> None: + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: self._buffer = buffer self._http2 = http2 @@ -124,17 +125,17 @@ async def connect_tcp( self, host: str, port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: return AsyncMockStream(list(self._buffer), http2=self._http2) async def connect_unix_socket( self, path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: return AsyncMockStream(list(self._buffer), http2=self._http2) diff --git a/httpcore/_backends/sync.py b/httpcore/_backends/sync.py index 7b7b417d..e9833171 100644 --- a/httpcore/_backends/sync.py +++ b/httpcore/_backends/sync.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import socket import ssl import sys @@ -33,8 +35,8 @@ def __init__( self, sock: socket.socket, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ): self._sock = sock self._incoming = ssl.MemoryBIO() @@ -74,7 +76,7 @@ def _perform_io( if errno is None: return ret - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} with map_exceptions(exc_map): self._sock.settimeout(timeout) @@ -82,7 +84,7 @@ def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) ) - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + def write(self, buffer: bytes, timeout: float | None = None) -> None: exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} with map_exceptions(exc_map): self._sock.settimeout(timeout) @@ -96,9 +98,9 @@ def close(self) -> None: def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "NetworkStream": + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: raise NotImplementedError() def get_extra_info(self, info: str) -> typing.Any: @@ -119,13 +121,13 @@ class SyncStream(NetworkStream): def __init__(self, sock: socket.socket) -> None: self._sock = sock - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} with map_exceptions(exc_map): self._sock.settimeout(timeout) return self._sock.recv(max_bytes) - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: + def write(self, buffer: bytes, timeout: float | None = None) -> None: if not buffer: return @@ -142,8 +144,8 @@ def close(self) -> None: def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> NetworkStream: exc_map: 
ExceptionMapping = { socket.timeout: ConnectTimeout, @@ -187,9 +189,9 @@ def connect_tcp( self, host: str, port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: # Note that we automatically include `TCP_NODELAY` # in addition to any other custom socket options. @@ -216,8 +218,8 @@ def connect_tcp( def connect_unix_socket( self, path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, ) -> NetworkStream: # pragma: nocover if sys.platform == "win32": raise RuntimeError( diff --git a/httpcore/_backends/trio.py b/httpcore/_backends/trio.py index b1626d28..a6b6ab80 100644 --- a/httpcore/_backends/trio.py +++ b/httpcore/_backends/trio.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import ssl -import typing +from typing import Any, Iterable import trio @@ -20,9 +22,7 @@ class TrioStream(AsyncNetworkStream): def __init__(self, stream: trio.abc.Stream) -> None: self._stream = stream - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { trio.TooSlowError: ReadTimeout, @@ -34,9 +34,7 @@ async def read( data: bytes = await self._stream.receive_some(max_bytes=max_bytes) return data - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: + async def write(self, buffer: bytes, timeout: float | None = None) -> None: if not buffer: return @@ -56,8 +54,8 @@ async def aclose(self) -> None: async def start_tls( self, ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, + server_hostname: str | None = None, + timeout: float | None = None, ) -> AsyncNetworkStream: timeout_or_inf = float("inf") if timeout is None else timeout exc_map: ExceptionMapping = { @@ -80,7 +78,7 @@ async def start_tls( raise exc return TrioStream(ssl_stream) - def get_extra_info(self, info: str) -> typing.Any: + def get_extra_info(self, info: str) -> Any: if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. # Tracked at https://github.com/python-trio/trio/issues/542 @@ -113,9 +111,9 @@ async def connect_tcp( self, host: str, port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + local_address: str | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: # By default for TCP sockets, trio enables TCP_NODELAY. 
# https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream @@ -139,8 +137,8 @@ async def connect_tcp( async def connect_unix_socket( self, path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, + timeout: float | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> AsyncNetworkStream: # pragma: nocover if socket_options is None: socket_options = [] diff --git a/httpcore/_models.py b/httpcore/_models.py index dadee79f..0ffd2cd8 100644 --- a/httpcore/_models.py +++ b/httpcore/_models.py @@ -1,13 +1,13 @@ +from __future__ import annotations + from typing import ( Any, AsyncIterable, AsyncIterator, Iterable, Iterator, - List, Mapping, MutableMapping, - Optional, Sequence, Tuple, Union, @@ -24,7 +24,7 @@ Extensions = MutableMapping[str, Any] -def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: +def enforce_bytes(value: bytes | str, *, name: str) -> bytes: """ Any arguments that are ultimately represented as bytes can be specified either as bytes or as strings. @@ -45,7 +45,7 @@ def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") -def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": +def enforce_url(value: URL | bytes | str, *, name: str) -> URL: """ Type check for URL parameters. """ @@ -59,8 +59,8 @@ def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": def enforce_headers( - value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str -) -> List[Tuple[bytes, bytes]]: + value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str +) -> list[tuple[bytes, bytes]]: """ Convienence function that ensure all items in request or response headers are either bytes or strings in the plain ASCII range. 
@@ -91,8 +91,8 @@ def enforce_headers( def enforce_stream( - value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str -) -> Union[Iterable[bytes], AsyncIterable[bytes]]: + value: bytes | Iterable[bytes] | AsyncIterable[bytes] | None, *, name: str +) -> Iterable[bytes] | AsyncIterable[bytes]: if value is None: return ByteStream(b"") elif isinstance(value, bytes): @@ -113,11 +113,11 @@ def enforce_stream( def include_request_headers( - headers: List[Tuple[bytes, bytes]], + headers: list[tuple[bytes, bytes]], *, url: "URL", - content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], -) -> List[Tuple[bytes, bytes]]: + content: None | bytes | Iterable[bytes] | AsyncIterable[bytes], +) -> list[tuple[bytes, bytes]]: headers_set = set(k.lower() for k, v in headers) if b"host" not in headers_set: @@ -254,12 +254,12 @@ class URL: def __init__( self, - url: Union[bytes, str] = "", + url: bytes | str = "", *, - scheme: Union[bytes, str] = b"", - host: Union[bytes, str] = b"", - port: Optional[int] = None, - target: Union[bytes, str] = b"", + scheme: bytes | str = b"", + host: bytes | str = b"", + port: int | None = None, + target: bytes | str = b"", ) -> None: """ Parameters: @@ -325,12 +325,12 @@ class Request: def __init__( self, - method: Union[bytes, str], - url: Union[URL, bytes, str], + method: bytes | str, + url: URL | bytes | str, *, headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, + content: bytes | Iterable[bytes] | AsyncIterable[bytes] | None = None, + extensions: Extensions | None = None, ) -> None: """ Parameters: @@ -345,10 +345,10 @@ def __init__( """ self.method: bytes = enforce_bytes(method, name="method") self.url: URL = enforce_url(url, name="url") - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + self.headers: list[tuple[bytes, bytes]] = enforce_headers( headers, name="headers" ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + self.stream: Iterable[bytes] | AsyncIterable[bytes] = enforce_stream( content, name="content" ) self.extensions = {} if extensions is None else extensions @@ -375,8 +375,8 @@ def __init__( status: int, *, headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, + content: bytes | Iterable[bytes] | AsyncIterable[bytes] | None = None, + extensions: Extensions | None = None, ) -> None: """ Parameters: @@ -388,10 +388,10 @@ def __init__( `"reason_phrase"`, and `"network_stream"`. 
""" self.status: int = status - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( + self.headers: list[tuple[bytes, bytes]] = enforce_headers( headers, name="headers" ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( + self.stream: Iterable[bytes] | AsyncIterable[bytes] = enforce_stream( content, name="content" ) self.extensions = {} if extensions is None else extensions diff --git a/httpcore/_sync/connection.py b/httpcore/_sync/connection.py index c3890f34..117dfe44 100644 --- a/httpcore/_sync/connection.py +++ b/httpcore/_sync/connection.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import itertools import logging import ssl from types import TracebackType -from typing import Iterable, Iterator, Optional, Type +from typing import Iterable, Iterator from .._backends.sync import SyncBackend from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream @@ -37,15 +39,15 @@ class HTTPConnection(ConnectionInterface): def __init__( self, origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: self._origin = origin self._ssl_context = ssl_context @@ -59,7 +61,7 @@ def __init__( self._network_backend: NetworkBackend = ( SyncBackend() if network_backend is None else network_backend ) - self._connection: Optional[ConnectionInterface] = None + self._connection: ConnectionInterface | None = None self._connect_failed: bool = False self._request_lock = Lock() self._socket_options = socket_options @@ -208,13 +210,13 @@ def __repr__(self) -> str: # These context managers are not used in the standard flow, but are # useful for testing or working with connection instances directly. 
- def __enter__(self) -> "HTTPConnection": + def __enter__(self) -> HTTPConnection: return self def __exit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: self.close() diff --git a/httpcore/_sync/connection_pool.py b/httpcore/_sync/connection_pool.py index 01bec59e..cf9dda68 100644 --- a/httpcore/_sync/connection_pool.py +++ b/httpcore/_sync/connection_pool.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import ssl import sys from types import TracebackType -from typing import Iterable, Iterator, Iterable, List, Optional, Type +from typing import Iterable, Iterator, Iterable from .._backends.sync import SyncBackend from .._backends.base import SOCKET_OPTION, NetworkBackend @@ -15,12 +17,10 @@ class PoolRequest: def __init__(self, request: Request) -> None: self.request = request - self.connection: Optional[ConnectionInterface] = None + self.connection: ConnectionInterface | None = None self._connection_acquired = Event() - def assign_to_connection( - self, connection: Optional[ConnectionInterface] - ) -> None: + def assign_to_connection(self, connection: ConnectionInterface | None) -> None: self.connection = connection self._connection_acquired.set() @@ -29,7 +29,7 @@ def clear_connection(self) -> None: self._connection_acquired = Event() def wait_for_connection( - self, timeout: Optional[float] = None + self, timeout: float | None = None ) -> ConnectionInterface: if self.connection is None: self._connection_acquired.wait(timeout=timeout) @@ -47,17 +47,17 @@ class ConnectionPool(RequestInterface): def __init__( self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, + ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, http1: bool = True, http2: bool = False, retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: Iterable[SOCKET_OPTION] | None = None, ) -> None: """ A connection pool for making HTTP requests. @@ -116,8 +116,8 @@ def __init__( # The mutable state on a connection pool is the queue of incoming requests, # and the set of connections that are servicing those requests. - self._connections: List[ConnectionInterface] = [] - self._requests: List[PoolRequest] = [] + self._connections: list[ConnectionInterface] = [] + self._requests: list[PoolRequest] = [] # We only mutate the state of the connection pool within an 'optional_thread_lock' # context. This holds a threading lock unless we're running in async mode, @@ -139,7 +139,7 @@ def create_connection(self, origin: Origin) -> ConnectionInterface: ) @property - def connections(self) -> List[ConnectionInterface]: + def connections(self) -> list[ConnectionInterface]: """ Return a list of the connections currently in the pool. 
@@ -227,7 +227,7 @@ def handle_request(self, request: Request) -> Response: extensions=response.extensions, ) - def _assign_requests_to_connections(self) -> List[ConnectionInterface]: + def _assign_requests_to_connections(self) -> list[ConnectionInterface]: """ Manage the state of the connection pool, assigning incoming requests to connections as available. @@ -298,7 +298,7 @@ def _assign_requests_to_connections(self) -> List[ConnectionInterface]: return closing_connections - def _close_connections(self, closing: List[ConnectionInterface]) -> None: + def _close_connections(self, closing: list[ConnectionInterface]) -> None: # Close connections which have been removed from the pool. with ShieldCancellation(): for connection in closing: @@ -312,14 +312,14 @@ def close(self) -> None: self._connections = [] self._close_connections(closing_connections) - def __enter__(self) -> "ConnectionPool": + def __enter__(self) -> ConnectionPool: return self def __exit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, ) -> None: self.close() diff --git a/httpcore/_sync/http11.py b/httpcore/_sync/http11.py index a74ff8e8..5f02f92e 100644 --- a/httpcore/_sync/http11.py +++ b/httpcore/_sync/http11.py @@ -1,18 +1,11 @@ +from __future__ import annotations + import enum import logging import ssl import time from types import TracebackType -from typing import ( - Any, - Iterable, - Iterator, - List, - Optional, - Tuple, - Type, - Union, -) +from typing import Any, Iterable, Iterator, Union import h11 @@ -55,12 +48,12 @@ def __init__( self, origin: Origin, stream: NetworkStream, - keepalive_expiry: Optional[float] = None, + keepalive_expiry: float | None = None, ) -> None: self._origin = origin self._network_stream = stream - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._expire_at: Optional[float] = None + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None self._state = HTTPConnectionState.NEW self._state_lock = Lock() self._request_count = 0 @@ -167,9 +160,7 @@ def _send_request_body(self, request: Request) -> None: self._send_event(h11.EndOfMessage(), timeout=timeout) - def _send_event( - self, event: h11.Event, timeout: Optional[float] = None - ) -> None: + def _send_event(self, event: h11.Event, timeout: float | None = None) -> None: bytes_to_send = self._h11_state.send(event) if bytes_to_send is not None: self._network_stream.write(bytes_to_send, timeout=timeout) @@ -178,7 +169,7 @@ def _send_event( def _receive_response_headers( self, request: Request - ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: + ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]: timeouts = request.extensions.get("timeout", {}) timeout = timeouts.get("read", None) @@ -214,8 +205,8 @@ def _receive_response_body(self, request: Request) -> Iterator[bytes]: break def _receive_event( - self, timeout: Optional[float] = None - ) -> Union[h11.Event, Type[h11.PAUSED]]: + self, timeout: float | None = None + ) -> h11.Event | type[h11.PAUSED]: while True: with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): event = self._h11_state.next_event() @@ -316,14 +307,14 @@ def __repr__(self) -> str: # These context managers are not used in the standard flow, but are # useful for testing or working with connection 
-    def __enter__(self) -> "HTTP11Connection":
+    def __enter__(self) -> HTTP11Connection:
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         self.close()
@@ -360,7 +351,7 @@ def __init__(self, stream: NetworkStream, leading_data: bytes) -> None:
         self._stream = stream
         self._leading_data = leading_data

-    def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+    def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
         if self._leading_data:
             buffer = self._leading_data[:max_bytes]
             self._leading_data = self._leading_data[max_bytes:]
@@ -368,7 +359,7 @@ def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
         else:
             return self._stream.read(max_bytes, timeout)

-    def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+    def write(self, buffer: bytes, timeout: float | None = None) -> None:
         self._stream.write(buffer, timeout)

     def close(self) -> None:
@@ -377,8 +368,8 @@ def close(self) -> None:

     def start_tls(
         self,
         ssl_context: ssl.SSLContext,
-        server_hostname: Optional[str] = None,
-        timeout: Optional[float] = None,
+        server_hostname: str | None = None,
+        timeout: float | None = None,
     ) -> NetworkStream:
         return self._stream.start_tls(ssl_context, server_hostname, timeout)
diff --git a/httpcore/_sync/http2.py b/httpcore/_sync/http2.py
index 1ee4bbb3..c7bbdc89 100644
--- a/httpcore/_sync/http2.py
+++ b/httpcore/_sync/http2.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 import enum
 import logging
 import time
 import types
-import typing
+from typing import Iterable, Iterator

 import h2.config
 import h2.connection
@@ -45,14 +47,14 @@ def __init__(
         self,
         origin: Origin,
         stream: NetworkStream,
-        keepalive_expiry: typing.Optional[float] = None,
+        keepalive_expiry: float | None = None,
     ):
         self._origin = origin
         self._network_stream = stream
-        self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
+        self._keepalive_expiry: float | None = keepalive_expiry
         self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
         self._state = HTTPConnectionState.IDLE
-        self._expire_at: typing.Optional[float] = None
+        self._expire_at: float | None = None
         self._request_count = 0
         self._init_lock = Lock()
         self._state_lock = Lock()
@@ -63,24 +65,20 @@ def __init__(
         self._connection_error = False

         # Mapping from stream ID to response stream events.
-        self._events: typing.Dict[
+        self._events: dict[
             int,
-            typing.Union[
-                h2.events.ResponseReceived,
-                h2.events.DataReceived,
-                h2.events.StreamEnded,
-                h2.events.StreamReset,
-            ],
+            h2.events.ResponseReceived
+            | h2.events.DataReceived
+            | h2.events.StreamEnded
+            | h2.events.StreamReset,
         ] = {}

         # Connection terminated events are stored as state since
         # we need to handle them for all streams.
-        self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
-            None
-        )
+        self._connection_terminated: h2.events.ConnectionTerminated | None = None

-        self._read_exception: typing.Optional[Exception] = None
-        self._write_exception: typing.Optional[Exception] = None
+        self._read_exception: Exception | None = None
+        self._write_exception: Exception | None = None

     def handle_request(self, request: Request) -> Response:
         if not self.can_handle_request(request.url.origin):
@@ -255,7 +253,7 @@ def _send_request_body(self, request: Request, stream_id: int) -> None:
         if not has_body_headers(request):
             return

-        assert isinstance(request.stream, typing.Iterable)
+        assert isinstance(request.stream, Iterable)
         for data in request.stream:
             self._send_stream_data(request, stream_id, data)
         self._send_end_stream(request, stream_id)
@@ -284,7 +282,7 @@ def _send_end_stream(self, request: Request, stream_id: int) -> None:

     def _receive_response(
         self, request: Request, stream_id: int
-    ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+    ) -> tuple[int, list[tuple[bytes, bytes]]]:
         """
         Return the response status code and headers for a given stream ID.
         """
@@ -305,7 +303,7 @@ def _receive_response(

     def _receive_response_body(
         self, request: Request, stream_id: int
-    ) -> typing.Iterator[bytes]:
+    ) -> Iterator[bytes]:
         """
         Iterator that returns the bytes of the response body for a given stream ID.
         """
@@ -321,9 +319,7 @@ def _receive_response_body(

     def _receive_stream_event(
         self, request: Request, stream_id: int
-    ) -> typing.Union[
-        h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
-    ]:
+    ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
         """
         Return the next available event for a given stream ID.

@@ -337,7 +333,7 @@ def _receive_stream_event(
         return event

     def _receive_events(
-        self, request: Request, stream_id: typing.Optional[int] = None
+        self, request: Request, stream_id: int | None = None
     ) -> None:
         """
         Read some data from the network until we see one or more events
@@ -425,9 +421,7 @@ def close(self) -> None:

     # Wrappers around network read/write operations...

-    def _read_incoming_data(
-        self, request: Request
-    ) -> typing.List[h2.events.Event]:
+    def _read_incoming_data(self, request: Request) -> list[h2.events.Event]:
         timeouts = request.extensions.get("timeout", {})
         timeout = timeouts.get("read", None)

@@ -451,7 +445,7 @@ def _read_incoming_data(
             self._connection_error = True
             raise exc

-        events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)
+        events: list[h2.events.Event] = self._h2_state.receive_data(data)

         return events

@@ -544,14 +538,14 @@ def __repr__(self) -> str:

     # These context managers are not used in the standard flow, but are
     # useful for testing or working with connection instances directly.
-    def __enter__(self) -> "HTTP2Connection":
+    def __enter__(self) -> HTTP2Connection:
         return self

     def __exit__(
         self,
-        exc_type: typing.Optional[typing.Type[BaseException]] = None,
-        exc_value: typing.Optional[BaseException] = None,
-        traceback: typing.Optional[types.TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: types.TracebackType | None = None,
     ) -> None:
         self.close()
@@ -565,7 +559,7 @@ def __init__(
         self._stream_id = stream_id
         self._closed = False

-    def __iter__(self) -> typing.Iterator[bytes]:
+    def __iter__(self) -> Iterator[bytes]:
         kwargs = {"request": self._request, "stream_id": self._stream_id}
         try:
             with Trace("receive_response_body", logger, self._request, kwargs):
diff --git a/httpcore/_sync/http_proxy.py b/httpcore/_sync/http_proxy.py
index 6acac9a7..390e19ee 100644
--- a/httpcore/_sync/http_proxy.py
+++ b/httpcore/_sync/http_proxy.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import logging
 import ssl
 from base64 import b64encode
-from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
+from typing import Iterable, Mapping, Sequence, Tuple, Union

 from .._backends.base import SOCKET_OPTION, NetworkBackend
 from .._exceptions import ProxyError
@@ -30,9 +32,9 @@ def merge_headers(
-    default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-    override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-) -> List[Tuple[bytes, bytes]]:
+    default_headers: Sequence[tuple[bytes, bytes]] | None = None,
+    override_headers: Sequence[tuple[bytes, bytes]] | None = None,
+) -> list[tuple[bytes, bytes]]:
     """
     Append default_headers and override_headers, de-duplicating if a key exists
     in both cases.
@@ -60,21 +62,21 @@ class HTTPProxy(ConnectionPool):

     def __init__(
         self,
-        proxy_url: Union[URL, bytes, str],
-        proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
-        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
-        max_connections: Optional[int] = 10,
-        max_keepalive_connections: Optional[int] = None,
-        keepalive_expiry: Optional[float] = None,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        local_address: Optional[str] = None,
-        uds: Optional[str] = None,
-        network_backend: Optional[NetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: NetworkBackend | None = None,
+        socket_options: Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         """
         A connection pool for making HTTP requests.
@@ -172,11 +174,11 @@ def __init__(
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
-        keepalive_expiry: Optional[float] = None,
-        network_backend: Optional[NetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        keepalive_expiry: float | None = None,
+        network_backend: NetworkBackend | None = None,
+        socket_options: Iterable[SOCKET_OPTION] | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
     ) -> None:
         self._connection = HTTPConnection(
             origin=proxy_origin,
@@ -236,14 +238,14 @@ def __init__(
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_ssl_context: Optional[ssl.SSLContext] = None,
-        proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-        keepalive_expiry: Optional[float] = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        proxy_headers: Sequence[tuple[bytes, bytes]] | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
-        network_backend: Optional[NetworkBackend] = None,
-        socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+        network_backend: NetworkBackend | None = None,
+        socket_options: Iterable[SOCKET_OPTION] | None = None,
     ) -> None:
         self._connection: ConnectionInterface = HTTPConnection(
             origin=proxy_origin,
diff --git a/httpcore/_sync/interfaces.py b/httpcore/_sync/interfaces.py
index 5e95be1e..b7defbae 100644
--- a/httpcore/_sync/interfaces.py
+++ b/httpcore/_sync/interfaces.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 from contextlib import contextmanager
-from typing import Iterator, Optional, Union
+from typing import Iterator

 from .._models import (
     URL,
@@ -18,12 +20,12 @@ class RequestInterface:

     def request(
         self,
-        method: Union[bytes, str],
-        url: Union[URL, bytes, str],
+        method: bytes | str,
+        url: URL | bytes | str,
         *,
         headers: HeaderTypes = None,
-        content: Union[bytes, Iterator[bytes], None] = None,
-        extensions: Optional[Extensions] = None,
+        content: bytes | Iterator[bytes] | None = None,
+        extensions: Extensions | None = None,
     ) -> Response:
         # Strict type checking on our parameters.
         method = enforce_bytes(method, name="method")
@@ -50,12 +52,12 @@ def request(

     @contextmanager
     def stream(
         self,
-        method: Union[bytes, str],
-        url: Union[URL, bytes, str],
+        method: bytes | str,
+        url: URL | bytes | str,
         *,
         headers: HeaderTypes = None,
-        content: Union[bytes, Iterator[bytes], None] = None,
-        extensions: Optional[Extensions] = None,
+        content: bytes | Iterator[bytes] | None = None,
+        extensions: Extensions | None = None,
     ) -> Iterator[Response]:
         # Strict type checking on our parameters.
         method = enforce_bytes(method, name="method")
diff --git a/httpcore/_sync/socks_proxy.py b/httpcore/_sync/socks_proxy.py
index 502e4d7f..9b633675 100644
--- a/httpcore/_sync/socks_proxy.py
+++ b/httpcore/_sync/socks_proxy.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import logging
 import ssl
-import typing

 from socksio import socks5
@@ -43,7 +44,7 @@ def _init_socks5_connection(
     *,
     host: bytes,
     port: int,
-    auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+    auth: tuple[bytes, bytes] | None = None,
 ) -> None:
     conn = socks5.SOCKS5Connection()

@@ -108,18 +109,16 @@ class SOCKSProxy(ConnectionPool):

     def __init__(
         self,
-        proxy_url: typing.Union[URL, bytes, str],
-        proxy_auth: typing.Optional[
-            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
-        ] = None,
-        ssl_context: typing.Optional[ssl.SSLContext] = None,
-        max_connections: typing.Optional[int] = 10,
-        max_keepalive_connections: typing.Optional[int] = None,
-        keepalive_expiry: typing.Optional[float] = None,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
         retries: int = 0,
-        network_backend: typing.Optional[NetworkBackend] = None,
+        network_backend: NetworkBackend | None = None,
     ) -> None:
         """
         A connection pool for making HTTP requests.
@@ -167,7 +166,7 @@ def __init__(
             username, password = proxy_auth
             username_bytes = enforce_bytes(username, name="proxy_auth")
             password_bytes = enforce_bytes(password, name="proxy_auth")
-            self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
+            self._proxy_auth: tuple[bytes, bytes] | None = (
                 username_bytes,
                 password_bytes,
             )
@@ -192,12 +191,12 @@ def __init__(
         self,
         proxy_origin: Origin,
         remote_origin: Origin,
-        proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
-        ssl_context: typing.Optional[ssl.SSLContext] = None,
-        keepalive_expiry: typing.Optional[float] = None,
+        proxy_auth: tuple[bytes, bytes] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        keepalive_expiry: float | None = None,
         http1: bool = True,
         http2: bool = False,
-        network_backend: typing.Optional[NetworkBackend] = None,
+        network_backend: NetworkBackend | None = None,
     ) -> None:
         self._proxy_origin = proxy_origin
         self._remote_origin = remote_origin
@@ -211,7 +210,7 @@ def __init__(
             SyncBackend() if network_backend is None else network_backend
         )
         self._connect_lock = Lock()
-        self._connection: typing.Optional[ConnectionInterface] = None
+        self._connection: ConnectionInterface | None = None
         self._connect_failed = False

     def handle_request(self, request: Request) -> Response:
diff --git a/httpcore/_synchronization.py b/httpcore/_synchronization.py
index 50cfefe0..189f2c21 100644
--- a/httpcore/_synchronization.py
+++ b/httpcore/_synchronization.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
 import threading
 from types import TracebackType
-from typing import Optional, Type

 from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
@@ -66,7 +67,7 @@ def setup(self) -> None:
         elif self._backend == "asyncio":
             self._anyio_lock = anyio.Lock()

-    async def __aenter__(self) -> "AsyncLock":
+    async def __aenter__(self) -> AsyncLock:
         if not self._backend:
             self.setup()
@@ -79,9 +80,9 @@ async def __aenter__(self) -> "AsyncLock":

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         if self._backend == "trio":
             self._trio_lock.release()
@@ -97,14 +98,14 @@ class AsyncThreadLock:
     In the async case `AsyncThreadLock` is a no-op.
     """

-    def __enter__(self) -> "AsyncThreadLock":
+    def __enter__(self) -> AsyncThreadLock:
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         pass
@@ -133,7 +134,7 @@ def set(self) -> None:
         elif self._backend == "asyncio":
             self._anyio_event.set()

-    async def wait(self, timeout: Optional[float] = None) -> None:
+    async def wait(self, timeout: float | None = None) -> None:
         if not self._backend:
             self.setup()
@@ -206,7 +207,7 @@ def __init__(self) -> None:
         elif self._backend == "asyncio":
             self._anyio_shield = anyio.CancelScope(shield=True)

-    def __enter__(self) -> "AsyncShieldCancellation":
+    def __enter__(self) -> AsyncShieldCancellation:
         if self._backend == "trio":
             self._trio_shield.__enter__()
         elif self._backend == "asyncio":
@@ -215,9 +216,9 @@ def __enter__(self) -> "AsyncShieldCancellation":

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         if self._backend == "trio":
             self._trio_shield.__exit__(exc_type, exc_value, traceback)
@@ -239,15 +240,15 @@ class Lock:
     def __init__(self) -> None:
         self._lock = threading.Lock()

-    def __enter__(self) -> "Lock":
+    def __enter__(self) -> Lock:
         self._lock.acquire()
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         self._lock.release()
@@ -263,15 +264,15 @@ class ThreadLock:
     def __init__(self) -> None:
         self._lock = threading.Lock()

-    def __enter__(self) -> "ThreadLock":
+    def __enter__(self) -> ThreadLock:
         self._lock.acquire()
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         self._lock.release()
@@ -283,7 +284,7 @@ def __init__(self) -> None:
     def set(self) -> None:
         self._event.set()

-    def wait(self, timeout: Optional[float] = None) -> None:
+    def wait(self, timeout: float | None = None) -> None:
         if timeout == float("inf"):  # pragma: no cover
             timeout = None
         if not self._event.wait(timeout=timeout):
@@ -305,13 +306,13 @@ class ShieldCancellation:
     # Thread-synchronous codebases don't support cancellation semantics.
     # We have this class because we need to mirror the async and sync
     # cases within our package, but it's just a no-op.
-    def __enter__(self) -> "ShieldCancellation":
+    def __enter__(self) -> ShieldCancellation:
         return self

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         pass
diff --git a/httpcore/_trace.py b/httpcore/_trace.py
index b122a53e..3a58b79c 100644
--- a/httpcore/_trace.py
+++ b/httpcore/_trace.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
 import inspect
 import logging
 from types import TracebackType
-from typing import Any, Dict, Optional, Type
+from typing import Any

 from ._models import Request
@@ -11,8 +13,8 @@ def __init__(
         self,
         name: str,
         logger: logging.Logger,
-        request: Optional[Request] = None,
-        kwargs: Optional[Dict[str, Any]] = None,
+        request: Request | None = None,
+        kwargs: dict[str, Any] | None = None,
     ) -> None:
         self.name = name
         self.logger = logger
@@ -25,7 +27,7 @@ def __init__(
         self.should_trace = self.debug or self.trace_extension is not None
         self.prefix = self.logger.name.split(".")[-1]

-    def trace(self, name: str, info: Dict[str, Any]) -> None:
+    def trace(self, name: str, info: dict[str, Any]) -> None:
         if self.trace_extension is not None:
             prefix_and_name = f"{self.prefix}.{name}"
             ret = self.trace_extension(prefix_and_name, info)
@@ -44,7 +46,7 @@ def trace(self, name: str, info: Dict[str, Any]) -> None:
             message = f"{name} {args}"
             self.logger.debug(message)

-    def __enter__(self) -> "Trace":
+    def __enter__(self) -> Trace:
         if self.should_trace:
             info = self.kwargs
             self.trace(f"{self.name}.started", info)
@@ -52,9 +54,9 @@ def __enter__(self) -> "Trace":

     def __exit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         if self.should_trace:
             if exc_value is None:
@@ -64,7 +66,7 @@ def __exit__(
                 info = {"exception": exc_value}
                 self.trace(f"{self.name}.failed", info)

-    async def atrace(self, name: str, info: Dict[str, Any]) -> None:
+    async def atrace(self, name: str, info: dict[str, Any]) -> None:
         if self.trace_extension is not None:
             prefix_and_name = f"{self.prefix}.{name}"
             coro = self.trace_extension(prefix_and_name, info)
@@ -84,7 +86,7 @@ async def atrace(self, name: str, info: Dict[str, Any]) -> None:
             message = f"{name} {args}"
             self.logger.debug(message)

-    async def __aenter__(self) -> "Trace":
+    async def __aenter__(self) -> Trace:
         if self.should_trace:
             info = self.kwargs
             await self.atrace(f"{self.name}.started", info)
@@ -92,9 +94,9 @@ async def __aenter__(self) -> "Trace":

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]] = None,
-        exc_value: Optional[BaseException] = None,
-        traceback: Optional[TracebackType] = None,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
     ) -> None:
         if self.should_trace:
             if exc_value is None:
diff --git a/httpcore/_utils.py b/httpcore/_utils.py
index df5dea8f..c44ff93c 100644
--- a/httpcore/_utils.py
+++ b/httpcore/_utils.py
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
 import select
 import socket
 import sys
-import typing


-def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool:
+def is_socket_readable(sock: socket.socket | None) -> bool:
     """
     Return whether a socket, as identifed by its file descriptor, is readable.
     "A socket is readable" means that the read buffer isn't empty, i.e. that calling