diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index aa637db3..53324191 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -41,20 +41,21 @@ jobs: os: - ubuntu-24.04 - ubuntu-24.04-arm + no-httpx: [false] include: - - experimental: false - - upload-coverage: false - - upload-coverage: true - python-version: 3.11 + - no-httpx: true + python-version: 3.13 os: ubuntu-24.04 fail-fast: false uses: ./.github/workflows/reusable-test.yml with: python-version: ${{ matrix.python-version }} os: ${{ matrix.os }} - continue-on-error: ${{ matrix.experimental }} + continue-on-error: false enable-cache: ${{ github.ref_type == 'tag' && 'false' || 'auto' }} - upload-coverage: ${{ matrix.upload-coverage }} + upload-coverage: + ${{ matrix.python-version == 3.11 && matrix.os == 'ubuntu-24.04' }} + no-httpx: ${{ matrix.no-httpx }} secrets: # yamllint disable-line rule:line-length - codecov-token: ${{ matrix.upload-coverage && secrets.CODECOV_TOKEN || '' }} + codecov-token: ${{ matrix.python-version == 3.11 && matrix.os == 'ubuntu-24.04' && secrets.CODECOV_TOKEN || '' }} diff --git a/.github/workflows/reusable-test.yml b/.github/workflows/reusable-test.yml index 2adf8bed..3d1f2ae8 100644 --- a/.github/workflows/reusable-test.yml +++ b/.github/workflows/reusable-test.yml @@ -19,6 +19,9 @@ on: required: true upload-coverage: type: boolean + no-httpx: + type: boolean + required: true secrets: codecov-token: required: false @@ -28,7 +31,9 @@ env: jobs: test: - name: Test Python ${{ inputs.python-version }} on ${{ inputs.os }} + name: Test Python ${{ inputs.python-version }} on ${{ inputs.os }}${{ + inputs.no-httpx && ' no-httpx' || '' }} + runs-on: ${{ inputs.os }} continue-on-error: ${{ inputs.continue-on-error }} env: @@ -51,10 +56,18 @@ jobs: run: | uv run make pre-commit - name: Run unittests + if: ${{ ! 
inputs.no-httpx }} env: COLOR: 'yes' run: | uv run make mototest + - name: Run unittests without httpx installed + if: ${{ inputs.no-httpx }} + env: + COLOR: 'yes' + HTTP_BACKEND: 'aiohttp' + run: | + uv run --no-group=httpx make mototest - name: Upload coverage to Codecov if: ${{ inputs.upload-coverage }} # yamllint disable-line rule:line-length diff --git a/Makefile b/Makefile index 3befa492..fb8b934e 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ # Some simple testing tasks (sorry, UNIX only). -# ?= conditional assign, so users can pass options on the CLI instead of manually editing this file +# ?= is conditional assign, so users can pass options on the CLI instead of manually editing this file +HTTP_BACKEND?='all' FLAGS?= pre-commit: @@ -17,7 +18,7 @@ cov cover coverage: pre-commit @echo "open file://`pwd`/htmlcov/index.html" mototest: - python -Wd -X tracemalloc=5 -X faulthandler -m pytest -vv -m "not localonly" -n auto --cov-report term --cov-report html --cov-report xml --cov=aiobotocore --cov=tests --log-cli-level=DEBUG $(FLAGS) aiobotocore tests + python -Wd -X tracemalloc=5 -X faulthandler -m pytest -vv -m "not localonly" -n auto --cov-report term --cov-report html --cov-report xml --cov=aiobotocore --cov=tests --log-cli-level=DEBUG --http-backend=$(HTTP_BACKEND) $(FLAGS) aiobotocore tests clean: rm -rf `find . 
-name __pycache__` diff --git a/aiobotocore/_endpoint_helpers.py b/aiobotocore/_endpoint_helpers.py index 74696959..e08484e9 100644 --- a/aiobotocore/_endpoint_helpers.py +++ b/aiobotocore/_endpoint_helpers.py @@ -4,6 +4,11 @@ import botocore.retryhandler import wrapt +try: + import httpx +except ImportError: + httpx = None + # Monkey patching: We need to insert the aiohttp exception equivalents # The only other way to do this would be to have another config file :( _aiohttp_retryable_exceptions = [ @@ -14,10 +19,26 @@ asyncio.TimeoutError, ] + botocore.retryhandler.EXCEPTION_MAP['GENERAL_CONNECTION_ERROR'].extend( _aiohttp_retryable_exceptions ) +if httpx is not None: + # See https://www.python-httpx.org/exceptions/#the-exception-hierarchy + # All child exceptions of TransportError, except ProxyError, + # UnsupportedProtocol and CloseError. + _httpx_retryable_exceptions = [ + httpx.TimeoutException, + httpx.ProtocolError, + httpx.ConnectError, + httpx.ReadError, + httpx.WriteError, + ] + botocore.retryhandler.EXCEPTION_MAP['GENERAL_CONNECTION_ERROR'].extend( + _httpx_retryable_exceptions + ) + def _text(s, encoding='utf-8', errors='strict'): if isinstance(s, bytes): diff --git a/aiobotocore/awsrequest.py b/aiobotocore/awsrequest.py index 471a8136..f7ece1b9 100644 --- a/aiobotocore/awsrequest.py +++ b/aiobotocore/awsrequest.py @@ -1,6 +1,11 @@ import botocore.utils from botocore.awsrequest import AWSResponse +try: + import httpx +except ImportError: + httpx = None + class AioAWSResponse(AWSResponse): # Unlike AWSResponse, these return awaitables @@ -9,8 +14,12 @@ async def _content_prop(self): """Content of the response as bytes.""" if self._content is None: + if httpx is not None and isinstance(self.raw, httpx.Response): + read_func = self.raw.aread + else: + read_func = self.raw.read # NOTE: this will cache the data in self.raw - self._content = await self.raw.read() or b'' + self._content = await read_func() or b'' return self._content diff --git 
a/aiobotocore/config.py b/aiobotocore/config.py index c9139245..f785ee07 100644 --- a/aiobotocore/config.py +++ b/aiobotocore/config.py @@ -4,6 +4,13 @@ from botocore.exceptions import ParamValidationError from aiobotocore.endpoint import DEFAULT_HTTP_SESSION_CLS +from aiobotocore.httpxsession import HttpxSession + +# AWS has a 20 second idle timeout: +# https://web.archive.org/web/20150926192339/https://forums.aws.amazon.com/message.jspa?messageID=215367 +# and aiohttp default timeout is 30s so we set it to something +# reasonable here +DEFAULT_KEEPALIVE_TIMEOUT = 12 class AioConfig(botocore.client.Config): @@ -15,18 +22,16 @@ def __init__( ): super().__init__(**kwargs) - self._validate_connector_args(connector_args) + self._validate_connector_args(connector_args, http_session_cls) self.connector_args = copy.copy(connector_args) self.http_session_cls = http_session_cls if not self.connector_args: self.connector_args = dict() if 'keepalive_timeout' not in self.connector_args: - # AWS has a 20 second idle timeout: - # https://web.archive.org/web/20150926192339/https://forums.aws.amazon.com/message.jspa?messageID=215367 - # and aiohttp default timeout is 30s so we set it to something - # reasonable here - self.connector_args['keepalive_timeout'] = 12 + self.connector_args['keepalive_timeout'] = ( + DEFAULT_KEEPALIVE_TIMEOUT + ) def merge(self, other_config): # Adapted from parent class @@ -35,13 +40,17 @@ def merge(self, other_config): return AioConfig(self.connector_args, **config_options) @staticmethod - def _validate_connector_args(connector_args): + def _validate_connector_args(connector_args, http_session_cls): if connector_args is None: return for k, v in connector_args.items(): # verify_ssl is handled by verify parameter to create_client if k == 'use_dns_cache': + if http_session_cls is HttpxSession: + raise ParamValidationError( + report='Httpx does not support dns caching. 
https://github.com/encode/httpx/discussions/2211' + ) if not isinstance(v, bool): raise ParamValidationError( report=f'{k} value must be a boolean' @@ -51,12 +60,16 @@ def _validate_connector_args(connector_args): raise ParamValidationError( report=f'{k} value must be an int or None' ) - elif k == 'keepalive_timeout': + elif k in ('keepalive_timeout', 'write_timeout', 'pool_timeout'): if v is not None and not isinstance(v, (float, int)): raise ParamValidationError( report=f'{k} value must be a float/int or None' ) elif k == 'force_close': + if http_session_cls is HttpxSession: + raise ParamValidationError( + report=f'Httpx backend does not currently support {k}.' + ) if not isinstance(v, bool): raise ParamValidationError( report=f'{k} value must be a boolean' @@ -72,6 +85,10 @@ def _validate_connector_args(connector_args): elif k == "resolver": from aiohttp.abc import AbstractResolver + if http_session_cls is HttpxSession: + raise ParamValidationError( + report=f'Httpx backend does not support {k}.' 
+ ) if not isinstance(v, AbstractResolver): raise ParamValidationError( report=f'{k} must be an instance of a AbstractResolver' diff --git a/aiobotocore/endpoint.py b/aiobotocore/endpoint.py index a61e7797..5fafe8f2 100644 --- a/aiobotocore/endpoint.py +++ b/aiobotocore/endpoint.py @@ -17,7 +17,12 @@ from aiobotocore.httpchecksum import handle_checksum_body from aiobotocore.httpsession import AIOHTTPSession from aiobotocore.parsers import AioResponseParserFactory -from aiobotocore.response import StreamingBody +from aiobotocore.response import HttpxStreamingBody, StreamingBody + +try: + import httpx +except ImportError: + httpx = None DEFAULT_HTTP_SESSION_CLS = AIOHTTPSession @@ -50,8 +55,11 @@ async def convert_to_response_dict(http_response, operation_model): elif operation_model.has_event_stream_output: response_dict['body'] = http_response.raw elif operation_model.has_streaming_output: - length = response_dict['headers'].get('content-length') - response_dict['body'] = StreamingBody(http_response.raw, length) + if httpx and isinstance(http_response.raw, httpx.Response): + response_dict['body'] = HttpxStreamingBody(http_response.raw) + else: + length = response_dict['headers'].get('content-length') + response_dict['body'] = StreamingBody(http_response.raw, length) else: response_dict['body'] = await http_response.content return response_dict diff --git a/aiobotocore/httpchecksum.py b/aiobotocore/httpchecksum.py index 39386a63..7f7cebe7 100644 --- a/aiobotocore/httpchecksum.py +++ b/aiobotocore/httpchecksum.py @@ -12,7 +12,12 @@ ) from aiobotocore._helpers import resolve_awaitable -from aiobotocore.response import StreamingBody +from aiobotocore.response import HttpxStreamingBody, StreamingBody + +try: + import httpx +except ImportError: + httpx = None class AioAwsChunkedWrapper(AwsChunkedWrapper): @@ -93,6 +98,31 @@ def _validate_checksum(self): raise FlexibleChecksumError(error_msg=error_msg) +# TODO: fix inheritance? 
read & _validate_checksum are the exact same as above +# only diff is super class and how to call __init__ +class HttpxStreamingChecksumBody(HttpxStreamingBody): + def __init__(self, raw_stream, content_length, checksum, expected): + # HttpxStreamingbody doesn't use content_length + super().__init__(raw_stream) + self._checksum = checksum + self._expected = expected + + async def read(self, amt=None): + chunk = await super().read(amt=amt) + self._checksum.update(chunk) + if amt is None or (not chunk and amt > 0): + self._validate_checksum() + return chunk + + def _validate_checksum(self): + if self._checksum.digest() != base64.b64decode(self._expected): + error_msg = ( + f"Expected checksum {self._expected} did not match calculated " + f"checksum: {self._checksum.b64digest()}" + ) + raise FlexibleChecksumError(error_msg=error_msg) + + async def handle_checksum_body( http_response, response, context, operation_model ): @@ -139,7 +169,11 @@ async def handle_checksum_body( def _handle_streaming_response(http_response, response, algorithm): checksum_cls = _CHECKSUM_CLS.get(algorithm) header_name = f"x-amz-checksum-{algorithm}" - return StreamingChecksumBody( + if httpx is not None and isinstance(http_response.raw, httpx.Response): + streaming_cls = HttpxStreamingChecksumBody + else: + streaming_cls = StreamingChecksumBody + return streaming_cls( http_response.raw, response["headers"].get("content-length"), checksum_cls(), diff --git a/aiobotocore/httpsession.py b/aiobotocore/httpsession.py index 1ab04bdd..87b31c2f 100644 --- a/aiobotocore/httpsession.py +++ b/aiobotocore/httpsession.py @@ -40,6 +40,7 @@ from multidict import CIMultiDict import aiobotocore.awsrequest +import aiobotocore.config # avoid circular import from aiobotocore._endpoint_helpers import _IOBaseWrapper, _text @@ -83,10 +84,9 @@ def __init__( self._timeout = timeout self._connector_args = connector_args if self._connector_args is None: - # AWS has a 20 second idle timeout: - # 
https://web.archive.org/web/20150926192339/https://forums.aws.amazon.com/message.jspa?messageID=215367 - # aiohttp default timeout is 30s so set something reasonable here - self._connector_args = dict(keepalive_timeout=12) + self._connector_args = dict( + keepalive_timeout=aiobotocore.config.DEFAULT_KEEPALIVE_TIMEOUT + ) self._max_pool_connections = max_pool_connections self._socket_options = socket_options diff --git a/aiobotocore/httpxsession.py b/aiobotocore/httpxsession.py new file mode 100644 index 00000000..d0c55ada --- /dev/null +++ b/aiobotocore/httpxsession.py @@ -0,0 +1,265 @@ +from __future__ import annotations + +import asyncio +import io +import os +import socket +import ssl +from collections.abc import AsyncIterable, Iterable +from typing import TYPE_CHECKING, Any, cast + +import botocore +from botocore.awsrequest import AWSPreparedRequest +from botocore.httpsession import ( + MAX_POOL_CONNECTIONS, + ConnectionClosedError, + ConnectTimeoutError, + EndpointConnectionError, + HTTPClientError, + ProxyConnectionError, + ReadTimeoutError, + create_urllib3_context, + ensure_boolean, + get_cert_path, + logger, +) +from multidict import CIMultiDict + +import aiobotocore.awsrequest +import aiobotocore.config # avoid circular import +from aiobotocore._endpoint_helpers import _text + +try: + import httpx +except ImportError: + httpx = None + +if TYPE_CHECKING: + from ssl import SSLContext + + +class HttpxSession: + def __init__( + self, + verify: bool = True, + proxies: dict[str, str] | None = None, # {scheme: url} + timeout: float | list[float] | tuple[float, float] | None = None, + max_pool_connections: int = MAX_POOL_CONNECTIONS, + socket_options: list[Any] | None = None, + client_cert: str | tuple[str, str] | None = None, + proxies_config: dict[str, str] | None = None, + connector_args: dict[str, Any] | None = None, + ): + if httpx is None: # pragma: no cover + raise RuntimeError( + "Using HttpxSession requires httpx to be installed" + ) + if proxies or 
proxies_config: + raise NotImplementedError( + "Proxy support not implemented with httpx as backend." + ) + + if connector_args is None: + self._connector_args: dict[str, Any] = { + 'keepalive_timeout': aiobotocore.config.DEFAULT_KEEPALIVE_TIMEOUT + } + else: + self._connector_args = connector_args + + # TODO: neither this nor AIOHTTPSession handles socket_options + self._session: httpx.AsyncClient | None = None + conn_timeout: float | None + read_timeout: float | None + + if isinstance(timeout, (list, tuple)): + conn_timeout, read_timeout = timeout + else: + conn_timeout = read_timeout = timeout + + write_timeout = self._connector_args.get('write_timeout', 5) + pool_timeout = self._connector_args.get('pool_timeout', 5) + + self._timeout = httpx.Timeout( + connect=conn_timeout, + read=read_timeout, + write=write_timeout, + pool=pool_timeout, + ) + + self._cert_file = None + self._key_file = None + if isinstance(client_cert, str): + self._cert_file = client_cert + elif isinstance(client_cert, tuple): + self._cert_file, self._key_file = client_cert + elif client_cert is not None: + raise TypeError(f'{client_cert} must be str or tuple[str,str]') + + if 'use_dns_cache' in self._connector_args: + raise NotImplementedError( + "DNS caching is not implemented by httpx. 
https://github.com/encode/httpx/discussions/2211" + ) + if 'force_close' in self._connector_args: + raise NotImplementedError("Not supported with httpx as backend.") + if 'resolver' in self._connector_args: + raise NotImplementedError("Not supported with httpx as backend.") + + self._max_pool_connections = max_pool_connections + self._socket_options = socket_options + if socket_options is None: + self._socket_options = [] + + # TODO [httpx]: clean up + ssl_context: SSLContext | None = None + self._verify: bool | str | SSLContext = verify + if not verify: + return + if 'ssl_context' in self._connector_args: + self._verify = cast( + 'SSLContext', self._connector_args['ssl_context'] + ) + return + + ssl_context = self._get_ssl_context() + + # inline self._setup_ssl_cert + ca_certs = get_cert_path(verify) + if ca_certs: + ssl_context.load_verify_locations(ca_certs, None, None) + if ssl_context is not None: + self._verify = ssl_context + + async def __aenter__(self): + assert not self._session + + limits = httpx.Limits( + max_connections=self._max_pool_connections, + keepalive_expiry=self._connector_args['keepalive_timeout'], + ) + + # TODO [httpx]: I put logic here to minimize diff / accidental downstream + # consequences - but can probably put this logic in __init__ + if self._cert_file and self._key_file is None: + cert = self._cert_file + elif self._cert_file: + cert = (self._cert_file, self._key_file) + else: + cert = None + + self._session = httpx.AsyncClient( + timeout=self._timeout, limits=limits, cert=cert + ) + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + if self._session: + await self._session.__aexit__(exc_type, exc_val, exc_tb) + self._session = None + self._connector = None + + def _get_ssl_context(self) -> SSLContext: + ssl_context = create_urllib3_context() + if self._cert_file: + ssl_context.load_cert_chain(self._cert_file, self._key_file) + return ssl_context + + async def close(self) -> None: + await self.__aexit__(None, 
None, None) + + async def send( + self, request: AWSPreparedRequest + ) -> aiobotocore.awsrequest.AioAWSResponse: + try: + url = request.url + headers = request.headers + + # currently no support for BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER + if ensure_boolean( + os.environ.get('BOTO_EXPERIMENTAL__ADD_PROXY_HOST_HEADER', '') + ): + raise NotImplementedError( + 'httpx implementation of aiobotocore does not (currently) support proxies' + ) + + headers_ = CIMultiDict( + (z[0], _text(z[1], encoding='utf-8')) for z in headers.items() + ) + + # https://github.com/boto/botocore/issues/1255 + headers_['Accept-Encoding'] = 'identity' + + content: AsyncIterable | bytes | bytearray | str | None = None + + async def to_async_iterable(stream: Iterable) -> AsyncIterable: + for item in stream: + yield item + await asyncio.sleep(0) # Yield control to event loop + + if isinstance(request.body, io.IOBase) and not isinstance( + request.body, AsyncIterable + ): + content = to_async_iterable(request.body) + else: + content = request.body + + # The target gets used as the HTTP target instead of the URL path + # it does not get normalized or otherwise processed, which is important + # since arbitrary dots and slashes are valid as key paths. 
+ # See test_basic_s3.test_non_normalized_key_paths + # This way of using it is currently ~undocumented, but recommended in + # https://github.com/encode/httpx/discussions/1805#discussioncomment-8975989 + extensions = {"target": bytes(url, encoding='utf-8')} + + assert self._session is not None + + httpx_request = self._session.build_request( + method=request.method, + url=url, + headers=headers_, + content=content, + extensions=extensions, + ) + # auth, follow_redirects + response = await self._session.send(httpx_request, stream=True) + response_headers = botocore.compat.HTTPHeaders.from_pairs( + response.headers.items() + ) + + http_response = aiobotocore.awsrequest.AioAWSResponse( + str(response.url), + response.status_code, + response_headers, + response, + ) + + if not request.stream_output: + # Cause the raw stream to be exhausted immediately. We do it + # this way instead of using preload_content because + # preload_content will never buffer chunked responses + await http_response.content + + return http_response + + except httpx.ConnectError as e: + raise EndpointConnectionError(endpoint_url=request.url, error=e) + except (socket.gaierror,) as e: + raise EndpointConnectionError(endpoint_url=request.url, error=e) + except asyncio.TimeoutError as e: + raise ReadTimeoutError(endpoint_url=request.url, error=e) + except httpx.ReadTimeout as e: + raise ReadTimeoutError(endpoint_url=request.url, error=e) + except httpx.TimeoutException as e: + raise ConnectTimeoutError(endpoint_url=request.url, error=e) + except httpx.ProxyError as e: + raise ProxyConnectionError(endpoint_url=request.url, error=e) + except httpx.CloseError as e: + raise ConnectionClosedError(endpoint_url=request.url, error=e) + except ssl.SSLError as e: + raise botocore.exceptions.SSLError(endpoint_url=request.url, error=e) + + except NotImplementedError: + raise # Avoid turning it into HTTPClientError. 
+ except Exception as e: + message = 'Exception received when sending httpx HTTP request' + logger.debug(message, exc_info=True) + raise HTTPClientError(error=e) diff --git a/aiobotocore/response.py b/aiobotocore/response.py index 71a18241..c3b8c9e4 100644 --- a/aiobotocore/response.py +++ b/aiobotocore/response.py @@ -29,8 +29,11 @@ class StreamingBody(wrapt.ObjectProxy): _DEFAULT_CHUNK_SIZE = 1024 - def __init__(self, raw_stream: aiohttp.StreamReader, content_length: str): + def __init__( + self, raw_stream: aiohttp.ClientResponse, content_length: str + ): super().__init__(raw_stream) + self.__wrapped__: aiohttp.ClientResponse self._self_content_length = content_length self._self_amount_read = 0 @@ -130,6 +133,32 @@ def _verify_content_length(self): def tell(self): return self._self_amount_read + async def aclose(self) -> None: + await self.__wrapped__.wait_for_close() + + +# wraps httpx.Response +class HttpxStreamingBody(wrapt.ObjectProxy): + async def read(self, amt=None): + if amt is not None: + # We could do a fancy thing here and start doing calls to + # aiter_bytes()/aiter_raw() and keep state + raise ValueError( + "httpx.Response.aread does not support reading a specific number of bytes" + ) + return await self.__wrapped__.aread() + + async def __aenter__(self): + # use AsyncClient.stream somehow? + # See "manual mode" at https://www.python-httpx.org/async/#streaming-responses + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # When support for anyio/trio is added this needs a shielded cancelscope to + # avoid swallowing exceptions. 
+ # See https://github.com/python-trio/trio/issues/455 + await self.__wrapped__.aclose() + async def get_response(operation_model, http_response): protocol = operation_model.metadata['protocol'] diff --git a/pyproject.toml b/pyproject.toml index 0488f609..02627d2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ awscli = [ boto3 = [ "boto3 >= 1.37.2, < 1.37.4", ] +httpx = ["httpx"] [project.urls] Repository = "https://github.com/aio-libs/aiobotocore" @@ -80,11 +81,15 @@ dev = [ "time-machine >= 2.15.0, < 3", # Used in test_signers.py "tomli; python_version<'3.11'", # Used in test_version.py ] +httpx = [ + "httpx" +] [tool.uv] default-groups = [ "botocore-dev", "dev", + "httpx", ] required-version = "~=0.7.0" diff --git a/tests/botocore_tests/unit/test_protocols.py b/tests/botocore_tests/unit/test_protocols.py index b810c4ab..f88d1009 100644 --- a/tests/botocore_tests/unit/test_protocols.py +++ b/tests/botocore_tests/unit/test_protocols.py @@ -148,6 +148,9 @@ def iter_chunks(self): ) async def test_output_compliance(json_description, case, basename): service_description = copy.deepcopy(json_description) + # FIXME: case is globally shared so breaks if parametrizing or re-running the test. + # There's probably a better way of fixing this than doing a copy. + case = copy.deepcopy(case) operation_name = case.get('given', {}).get('name', 'OperationName') service_description['operations'] = { operation_name: case, diff --git a/tests/botocore_tests/unit/test_tokens.py b/tests/botocore_tests/unit/test_tokens.py index 9cf23a77..765d9152 100644 --- a/tests/botocore_tests/unit/test_tokens.py +++ b/tests/botocore_tests/unit/test_tokens.py @@ -10,6 +10,7 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
+from copy import deepcopy from unittest import mock import dateutil.parser @@ -286,6 +287,10 @@ async def test_sso_token_provider_refresh(test_case): cache_key = "d033e22ae348aeb5660fc2140aec35850c4da997" token_cache = {} + # deepcopy the test case so the test can be parametrized against the same + # test case w/ aiohttp & httpx + test_case = deepcopy(test_case) + # Prepopulate the token cache cached_token = test_case.pop("cachedToken", None) if cached_token: diff --git a/tests/conftest.py b/tests/conftest.py index 7f145abb..235ab662 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,20 +1,34 @@ +from __future__ import annotations + import asyncio import multiprocessing import os import random +import re import string import tempfile from contextlib import AsyncExitStack, ExitStack from itertools import chain +from typing import TYPE_CHECKING, Literal from unittest.mock import patch import aiohttp +try: + import httpx +except ImportError: + httpx = None + # Third Party import pytest import aiobotocore.session from aiobotocore.config import AioConfig +from aiobotocore.httpsession import AIOHTTPSession +from aiobotocore.httpxsession import HttpxSession + +if TYPE_CHECKING: + from _pytest.nodes import Node host = '127.0.0.1' @@ -91,6 +105,14 @@ async def assert_num_uploads_found( ) +# Used by test_fail_proxy_request as it will fail during setup, so needs to +# be skipped before `skipif` would be able to skip the test. 
+@pytest.fixture +def skip_httpx(current_http_backend: str) -> None: + if current_http_backend == 'httpx': + pytest.skip('proxy support not implemented for httpx') + + @pytest.fixture def aa_fail_proxy_config(monkeypatch): # NOTE: name of this fixture must be alphabetically first to run first @@ -109,7 +131,7 @@ def aa_succeed_proxy_config(monkeypatch): @pytest.fixture -def session(): +def session() -> aiobotocore.session.AioSession: session = aiobotocore.session.AioSession() return session @@ -139,13 +161,33 @@ def s3_verify(): return None +@pytest.fixture +def current_http_backend(request) -> Literal['httpx', 'aiohttp']: + for mark in request.node.iter_markers("config_kwargs"): + assert len(mark.args) == 1 + assert isinstance(mark.args[0], dict) + http_session_cls = mark.args[0].get('http_session_cls') + if http_session_cls is HttpxSession: + return 'httpx' + # since aiohttp is default we don't test explicitly setting it + elif http_session_cls is AIOHTTPSession: # pragma: no cover + return 'aiohttp' + return 'aiohttp' + + +def read_kwargs(node: Node) -> dict[str, object]: + config_kwargs: dict[str, object] = {} + for mark in node.iter_markers("config_kwargs"): + assert not mark.kwargs, config_kwargs + assert len(mark.args) == 1 + assert isinstance(mark.args[0], dict) + config_kwargs.update(mark.args[0]) + return config_kwargs + + @pytest.fixture def config(request, region, signature_version): - config_kwargs = request.node.get_closest_marker("config_kwargs") or {} - if config_kwargs: - assert not config_kwargs.kwargs, config_kwargs - assert len(config_kwargs.args) == 1 - config_kwargs = config_kwargs.args[0] + config_kwargs = read_kwargs(request.node) connect_timeout = read_timout = 5 if _PYCHARM_HOSTED: @@ -246,14 +288,17 @@ async def alternative_s3_client( moto_server, mocking_test, aws_auth, + request, ): kw = {'endpoint_url': moto_server, **aws_auth} if mocking_test else {} + kwargs = read_kwargs(request.node) config = AioConfig( 
region_name=alternative_region, signature_version=signature_version, read_timeout=5, connect_timeout=5, + **kwargs, ) async with session.create_client( @@ -522,9 +567,16 @@ def fin(): @pytest.fixture -async def aio_session(): - async with aiohttp.ClientSession() as session: - yield session +async def aio_session(current_http_backend: Literal['httpx', 'aiohttp']): + if current_http_backend == 'httpx': + assert httpx is not None + async with httpx.AsyncClient() as client: + yield client + elif current_http_backend == 'aiohttp': + async with aiohttp.ClientSession() as session: + yield session + else: # pragma: no cover + raise AssertionError("unknown http backend") def pytest_configure(): @@ -596,4 +648,52 @@ async def exit_stack(): yield es +def pytest_addoption(parser: pytest.Parser): + parser.addoption( + "--http-backend", + default='aiohttp', + choices=['aiohttp', 'httpx', 'all'], + required=False, + help='Specify http backend to run tests against.', + ) + + +def pytest_generate_tests(metafunc): + """Parametrize all tests to run with both aiohttp and httpx as backend. + This is not a super clean solution, as some tests will not differ at all with + different http backends.""" + metafunc.parametrize( + '', + [ + pytest.param(id='aiohttp'), + pytest.param( + id='httpx', + marks=pytest.mark.config_kwargs( + {'http_session_cls': HttpxSession} + ), + ), + ], + ) + + +def pytest_collection_modifyitems(config: pytest.Config, items): + """Mark parametrized tests for skipping in case the corresponding backend is not enabled.""" + http_backend = config.getoption("--http-backend") + if http_backend == 'all': + return + if http_backend == 'aiohttp': + ignore_backend = 'httpx' + else: + assert ( + httpx is not None + ), "Cannot run httpx as backend if it's not installed." 
+ ignore_backend = 'aiohttp' + backend_skip = pytest.mark.skip( + reason='Selected not to run with --http-backend' + ) + for item in items: + if re.match(rf'.*\[.*{ignore_backend}.*\]', item.name): + item.add_marker(backend_skip) + + pytest_plugins = ['tests.mock_server'] diff --git a/tests/test_basic_s3.py b/tests/test_basic_s3.py index 555ff00a..8056378f 100644 --- a/tests/test_basic_s3.py +++ b/tests/test_basic_s3.py @@ -2,9 +2,15 @@ import base64 import hashlib from collections import defaultdict +from typing import Callable import aioitertools import botocore.retries.adaptive + +try: + import httpx +except ImportError: + httpx = None import pytest import aiobotocore.retries.adaptive @@ -38,7 +44,7 @@ async def test_can_make_request_no_verify(s3_client): async def test_fail_proxy_request( - aa_fail_proxy_config, s3_client, monkeypatch + skip_httpx, aa_fail_proxy_config, s3_client, monkeypatch ): # based on test_can_make_request with pytest.raises(httpsession.ProxyConnectionError): @@ -175,7 +181,9 @@ async def test_result_key_iters(s3_client, bucket_name, create_object): async def test_can_get_and_put_object( - s3_client, create_object, bucket_name: str + s3_client: aiobotocore.client.AioBaseClient, + create_object: Callable, + bucket_name: str, ): key_name = 'foobarbaz' await create_object(key_name, body='body contents') @@ -183,7 +191,7 @@ async def test_can_get_and_put_object( resp = await s3_client.get_object(Bucket=bucket_name, Key=key_name) data = await resp['Body'].read() # TODO: think about better api and make behavior like in aiohttp - resp['Body'].close() + await resp['Body'].aclose() assert data == b'body contents' # now test checksum'd file @@ -254,11 +262,19 @@ async def test_get_object_stream_wrapper( await create_object('foobarbaz', body='body contents') response = await s3_client.get_object(Bucket=bucket_name, Key='foobarbaz') body = response['Body'] - chunk1 = await body.read(1) - chunk2 = await body.read() + if httpx and isinstance(body, 
httpx.Response): + # httpx does not support `.aread(1)` + byte_iterator = body.aiter_raw(1) + chunk1 = await byte_iterator.__anext__() + chunk2 = b"" + async for b in byte_iterator: + chunk2 += b + else: + chunk1 = await body.read(1) + chunk2 = await body.read() assert chunk1 == b'b' assert chunk2 == b'ody contents' - response['Body'].close() + await body.aclose() async def test_get_object_stream_context( @@ -267,7 +283,8 @@ async def test_get_object_stream_context( await create_object('foobarbaz', body='body contents') response = await s3_client.get_object(Bucket=bucket_name, Key='foobarbaz') async with response['Body'] as stream: - await stream.read() + data = await stream.read() + assert data == b'body contents' async def test_paginate_max_items( @@ -362,7 +379,7 @@ async def test_unicode_key_put_list(s3_client, bucket_name, create_object): assert parsed['Contents'][0]['Key'] == key_name parsed = await s3_client.get_object(Bucket=bucket_name, Key=key_name) data = await parsed['Body'].read() - parsed['Body'].close() + await parsed['Body'].aclose() assert data == b'foo' @@ -415,7 +432,7 @@ async def test_copy_with_quoted_char(s3_client, create_object, bucket_name): # Now verify we can retrieve the copied object. resp = await s3_client.get_object(Bucket=bucket_name, Key=key_name2) data = await resp['Body'].read() - resp['Body'].close() + await resp['Body'].aclose() assert data == b'foo' @@ -433,7 +450,7 @@ async def test_copy_with_query_string(s3_client, create_object, bucket_name): # Now verify we can retrieve the copied object. resp = await s3_client.get_object(Bucket=bucket_name, Key=key_name2) data = await resp['Body'].read() - resp['Body'].close() + await resp['Body'].aclose() assert data == b'foo' @@ -451,7 +468,7 @@ async def test_can_copy_with_dict_form(s3_client, create_object, bucket_name): # Now verify we can retrieve the copied object. 
resp = await s3_client.get_object(Bucket=bucket_name, Key=key_name2) data = await resp['Body'].read() - resp['Body'].close() + await resp['Body'].aclose() assert data == b'foo' @@ -474,7 +491,7 @@ async def test_can_copy_with_dict_form_with_version( # Now verify we can retrieve the copied object. resp = await s3_client.get_object(Bucket=bucket_name, Key=key_name2) data = await resp['Body'].read() - resp['Body'].close() + await resp['Body'].aclose() assert data == b'foo' @@ -512,11 +529,17 @@ async def test_presign_with_existing_query_string_values( 'get_object', Params=params ) # Try to retrieve the object using the presigned url. - - async with aio_session.get(presigned_url) as resp: - data = await resp.read() - assert resp.headers['Content-Disposition'] == content_disposition - assert data == b'foo' + # TODO: compatibility layer between httpx.AsyncClient and aiohttp.ClientSession? + if httpx and isinstance(aio_session, httpx.AsyncClient): + async with aio_session.stream("GET", presigned_url) as resp: + data = await resp.aread() + headers = resp.headers + else: + async with aio_session.get(presigned_url) as resp: + data = await resp.read() + headers = resp.headers + assert headers['Content-Disposition'] == content_disposition + assert data == b'foo' @pytest.mark.parametrize('region', ['us-east-1']) @@ -541,9 +564,13 @@ async def test_presign_sigv4( ), msg # Try to retrieve the object using the presigned url. 
- async with aio_session.get(presigned_url) as resp: - data = await resp.read() - assert data == b'foo' + if httpx and isinstance(aio_session, httpx.AsyncClient): + async with aio_session.stream("GET", presigned_url) as resp: + data = await resp.aread() + else: + async with aio_session.get(presigned_url) as resp: + data = await resp.read() + assert data == b'foo' @pytest.mark.parametrize('signature_version', ['s3v4']) @@ -559,7 +586,7 @@ async def test_can_follow_signed_url_redirect( Bucket=bucket_name, Key='foobarbaz' ) data = await resp['Body'].read() - resp['Body'].close() + await resp['Body'].aclose() assert data == b'foo' diff --git a/tests/test_config.py b/tests/test_config.py index c2b0cddf..eea70068 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -7,14 +7,15 @@ from aiobotocore.config import AioConfig from aiobotocore.httpsession import AIOHTTPSession +from aiobotocore.httpxsession import HttpxSession from aiobotocore.session import AioSession, get_session from tests.mock_server import AIOServer -async def test_connector_args(): +async def test_connector_args(current_http_backend: str): with pytest.raises(ParamValidationError): # wrong type - connector_args = dict(use_dns_cache=1) + connector_args: dict[str, object] = dict(use_dns_cache=1) AioConfig(connector_args) with pytest.raises(ParamValidationError): @@ -52,6 +53,23 @@ async def test_connector_args(): connector_args = dict(foo="1") AioConfig(connector_args) + with pytest.raises( + ParamValidationError, + match='Httpx does not support dns caching. https://github.com/encode/httpx/discussions/2211', + ): + AioConfig({'use_dns_cache': True}, http_session_cls=HttpxSession) + + with pytest.raises( + ParamValidationError, + match='Httpx backend does not currently support force_close.', + ): + AioConfig({'force_close': True}, http_session_cls=HttpxSession) + + with pytest.raises( + ParamValidationError, match='Httpx backend does not support resolver.' 
+ ): + AioConfig({'resolver': True}, http_session_cls=HttpxSession) + # Test valid configs: AioConfig({"ttl_dns_cache": None}) AioConfig({"ttl_dns_cache": 1}) diff --git a/tests/test_lambda.py b/tests/test_lambda.py index 797f5919..4a1f6587 100644 --- a/tests/test_lambda.py +++ b/tests/test_lambda.py @@ -5,6 +5,11 @@ # Third Party import botocore.client + +try: + import httpx +except ImportError: + httpx = None import pytest @@ -41,10 +46,13 @@ def lambda_handler(event, context): return _process_lambda(lambda_src) -async def test_run_lambda(iam_client, lambda_client, aws_lambda_zip): +async def test_run_lambda( + iam_client, lambda_client, aws_lambda_zip, current_http_backend +): + function_name = f'test-function-{current_http_backend}' role_arn = await _get_role_arn(iam_client, 'test-iam-role') lambda_response = await lambda_client.create_function( - FunctionName='test-function', + FunctionName=function_name, Runtime='python3.9', Role=role_arn, Handler='lambda_function.lambda_handler', @@ -53,10 +61,10 @@ async def test_run_lambda(iam_client, lambda_client, aws_lambda_zip): Publish=True, Code={'ZipFile': aws_lambda_zip}, ) - assert lambda_response['FunctionName'] == 'test-function' + assert lambda_response['FunctionName'] == function_name invoke_response = await lambda_client.invoke( - FunctionName="test-function", + FunctionName=function_name, InvocationType="RequestResponse", LogType='Tail', Payload=json.dumps({"hello": "world"}), @@ -69,3 +77,5 @@ async def test_run_lambda(iam_client, lambda_client, aws_lambda_zip): assert json.loads(data) == {'statusCode': 200, "body": {"hello": "world"}} assert b"{'hello': 'world'}" in log_result + + await lambda_client.delete_function(FunctionName=function_name) diff --git a/tests/test_waiter.py b/tests/test_waiter.py index e29651e6..6e874055 100644 --- a/tests/test_waiter.py +++ b/tests/test_waiter.py @@ -27,7 +27,8 @@ def test_create_waiter_with_client( assert asyncio.iscoroutinefunction(waiter.wait) -async def 
test_sqs(cloudformation_client): +async def test_sqs(cloudformation_client, current_http_backend: str): + stack_name = f'my-stack-{current_http_backend}' cloudformation_template = """{ "AWSTemplateFormatVersion": "2010-09-09", "Resources": { @@ -42,11 +43,13 @@ async def test_sqs(cloudformation_client): # Create stack resp = await cloudformation_client.create_stack( - StackName='my-stack', TemplateBody=cloudformation_template + StackName=stack_name, TemplateBody=cloudformation_template ) assert resp['ResponseMetadata']['HTTPStatusCode'] == 200 # wait for complete waiter = cloudformation_client.get_waiter('stack_create_complete') - await waiter.wait(StackName='my-stack') + await waiter.wait(StackName=stack_name) + + await cloudformation_client.delete_stack(StackName=stack_name) diff --git a/uv.lock b/uv.lock index 67c2f3b0..e6341b11 100644 --- a/uv.lock +++ b/uv.lock @@ -27,6 +27,9 @@ awscli = [ boto3 = [ { name = "boto3" }, ] +httpx = [ + { name = "httpx" }, +] [package.dev-dependencies] botocore-dev = [ @@ -48,6 +51,9 @@ dev = [ { name = "time-machine" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] +httpx = [ + { name = "httpx" }, +] [package.metadata] requires-dist = [ @@ -56,12 +62,13 @@ requires-dist = [ { name = "awscli", marker = "extra == 'awscli'", specifier = ">=1.38.2,<1.38.4" }, { name = "boto3", marker = "extra == 'boto3'", specifier = ">=1.37.2,<1.37.4" }, { name = "botocore", specifier = ">=1.37.2,<1.37.4" }, + { name = "httpx", marker = "extra == 'httpx'" }, { name = "jmespath", specifier = ">=0.7.1,<2.0.0" }, { name = "multidict", specifier = ">=6.0.0,<7.0.0" }, { name = "python-dateutil", specifier = ">=2.1,<3.0.0" }, { name = "wrapt", specifier = ">=1.10.10,<2.0.0" }, ] -provides-extras = ["awscli", "boto3"] +provides-extras = ["awscli", "boto3", "httpx"] [package.metadata.requires-dev] botocore-dev = [ @@ -83,6 +90,7 @@ dev = [ { name = "time-machine", specifier = ">=2.15.0,<3" }, { name = "tomli", marker = "python_full_version 
< '3.11'" }, ] +httpx = [{ name = "httpx" }] [[package]] name = "aiohappyeyeballs" @@ -233,6 +241,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/89/03/a851e84fcbb85214dc637b6378121ef9a0dd61b4c65264675d8a5c9b1ae7/antlr4_python3_runtime-4.13.2-py3-none-any.whl", hash = "sha256:fe3835eb8d33daece0e799090eda89719dbccee7aa39ef94eed3818cafa5a7e8", size = 144462, upload-time = "2024-08-03T19:00:11.134Z" }, ] +[[package]] +name = "anyio" +version = "4.5.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.9'" }, + { name = "idna", marker = "python_full_version < '3.9'" }, + { name = "sniffio", marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/f9/9a7ce600ebe7804daf90d4d48b1c0510a4561ddce43a596be46676f82343/anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b", size = 171293 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/b4/f7e396030e3b11394436358ca258a81d6010106582422f23443c16ca1873/anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f", size = 89766 }, +] + +[[package]] +name = "anyio" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "idna", marker = "python_full_version >= '3.9'" }, + { name = "sniffio", marker = "python_full_version >= '3.9'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.9' and python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, +] + [[package]] name = "async-timeout" version = "5.0.1" @@ -855,6 +900,44 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ae/4f/7297663840621022bc73c22d7d9d80dbc78b4db6297f764b545cd5dd462d/graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f", size = 203416, upload-time = "2025-01-26T16:36:24.868Z" }, ] +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio", version = "4.5.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "anyio", version = "4.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + [[package]] name = "identify" version = "2.6.10" @@ -2130,6 +2213,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + [[package]] name = "sympy" version = "1.13.3"