diff --git a/.codecov.yml b/.codecov.yml index 30809053e16..e21d45ac7b2 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,5 +1,5 @@ codecov: - branch: 3.9 + branch: master notify: after_n_builds: 13 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index deb81163faf..9cf1501e811 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -12,6 +12,8 @@ updates: # Maintain dependencies for Python - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies schedule: @@ -31,6 +33,8 @@ updates: # Maintain dependencies for Python aiohttp backport - package-ecosystem: "pip" directory: "/" + allow: + - dependency-type: "all" labels: - dependencies target-branch: "3.11" diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index a6a58cef9c2..df27a9108d6 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -125,15 +125,13 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.8, 3.9, '3.10', '3.11', '3.12'] + pyver: [3.9, '3.10', '3.11', '3.12'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] exclude: - os: macos no-extensions: 'Y' - - os: macos - pyver: 3.8 - os: windows no-extensions: 'Y' include: @@ -198,7 +196,7 @@ jobs: PIP_USER: 1 run: >- PATH="${HOME}/Library/Python/3.11/bin:${HOME}/.local/bin:${PATH}" - pytest + pytest --junitxml=junit.xml shell: bash - name: Re-run the failing tests with maximum verbosity if: failure() @@ -234,6 +232,11 @@ jobs: steps.python-install.outputs.python-version }} token: ${{ secrets.CODECOV_TOKEN }} + - name: Upload test results to Codecov + if: ${{ !cancelled() }} + uses: codecov/test-results-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} check: # This job does nothing and is only used for the branch protection if: always() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dc3e65cf52f..0edf03d8db7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -107,6 
+107,7 @@ repos: - id: flake8 additional_dependencies: - flake8-docstrings==1.6.0 + - flake8-no-implicit-concat==0.3.4 - flake8-requirements==1.7.8 exclude: "^docs/" - repo: https://github.com/Lucas-C/pre-commit-hooks-markup diff --git a/CHANGES/3945.deprecation.rst b/CHANGES/3945.deprecation.rst new file mode 100644 index 00000000000..07f8566881a --- /dev/null +++ b/CHANGES/3945.deprecation.rst @@ -0,0 +1 @@ +Deprecate obsolete `timeout: float` and `receive_timeout: Optional[float]` in `ClientSession.ws_connect()`. Change default websocket receive timeout from `None` to `10.0`. diff --git a/CHANGES/5343.bugfix b/CHANGES/5343.bugfix new file mode 100644 index 00000000000..4e33071ea94 --- /dev/null +++ b/CHANGES/5343.bugfix @@ -0,0 +1 @@ +Fixed StreamResponse.prepared to return True after EOF is sent -- by :user:`arthurdarcet`. diff --git a/CHANGES/6485.bugfix.rst b/CHANGES/6485.bugfix.rst new file mode 100644 index 00000000000..b1d912f1579 --- /dev/null +++ b/CHANGES/6485.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Response.text`` when body is a ``Payload`` -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/6494.bugfix.rst b/CHANGES/6494.bugfix.rst new file mode 100644 index 00000000000..3827644f0d1 --- /dev/null +++ b/CHANGES/6494.bugfix.rst @@ -0,0 +1 @@ +Added support for URL credentials with empty (zero-length) username, e.g. ``https://:password@host`` -- by :user:`shuckc` diff --git a/CHANGES/6732.bugfix b/CHANGES/6732.bugfix new file mode 100644 index 00000000000..a460d7cd695 --- /dev/null +++ b/CHANGES/6732.bugfix @@ -0,0 +1 @@ +Fixed handling of some file-like objects (e.g. ``tarfile.extractfile()``) which raise ``AttributeError`` instead of ``OSError`` when ``fileno`` fails for streaming payload data -- by :user:`ReallyReivax`. 
diff --git a/CHANGES/6764.doc.rst b/CHANGES/6764.doc.rst new file mode 100644 index 00000000000..dea2019fc76 --- /dev/null +++ b/CHANGES/6764.doc.rst @@ -0,0 +1 @@ +Clarified that auth parameter in ClientSession will persist and be included with any request to any origin, even during redirects to different origins. -- by :user:`MaximZemskov`. diff --git a/CHANGES/6807.bugfix.rst b/CHANGES/6807.bugfix.rst new file mode 100644 index 00000000000..4eb07b9e0da --- /dev/null +++ b/CHANGES/6807.bugfix.rst @@ -0,0 +1 @@ +Stopped logging exceptions from ``web.run_app()`` that would be raised regardless -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/7167.bugfix.rst b/CHANGES/7167.bugfix.rst new file mode 100644 index 00000000000..766f1438b66 --- /dev/null +++ b/CHANGES/7167.bugfix.rst @@ -0,0 +1 @@ +Changed ``make_mocked_request()`` to use empty payload by default -- by :user:`rahulnht`. diff --git a/CHANGES/7731.misc.rst b/CHANGES/7731.misc.rst new file mode 100644 index 00000000000..f46ffa5816b --- /dev/null +++ b/CHANGES/7731.misc.rst @@ -0,0 +1 @@ +Added flake8 settings to avoid some forms of implicit concatenation. -- by :user:`booniepepper`. diff --git a/CHANGES/8612.feature.rst b/CHANGES/8612.feature.rst new file mode 100644 index 00000000000..96adcf6dc4c --- /dev/null +++ b/CHANGES/8612.feature.rst @@ -0,0 +1 @@ +Exported ``ClientWSTimeout`` to top-level namespace -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8768.bugfix.rst b/CHANGES/8768.bugfix.rst new file mode 100644 index 00000000000..18512163572 --- /dev/null +++ b/CHANGES/8768.bugfix.rst @@ -0,0 +1 @@ +Used more precise type for ``ClientResponseError.headers``, fixing some type errors when using them -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8797.breaking.rst b/CHANGES/8797.breaking.rst new file mode 100644 index 00000000000..c219ea3d264 --- /dev/null +++ b/CHANGES/8797.breaking.rst @@ -0,0 +1 @@ +Dropped support for Python 3.8 -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8823.bugfix.rst b/CHANGES/8823.bugfix.rst new file mode 100644 index 00000000000..ea18e65fd4a --- /dev/null +++ b/CHANGES/8823.bugfix.rst @@ -0,0 +1 @@ +Fixed Python parser chunked handling with multiple Transfer-Encoding values -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8845.bugfix.rst b/CHANGES/8845.bugfix.rst new file mode 100644 index 00000000000..ff0016ac14b --- /dev/null +++ b/CHANGES/8845.bugfix.rst @@ -0,0 +1 @@ +Changed behaviour when returning an invalid response to send a 500 response -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8847.misc.rst b/CHANGES/8847.misc.rst new file mode 100644 index 00000000000..58f61d48420 --- /dev/null +++ b/CHANGES/8847.misc.rst @@ -0,0 +1 @@ +Improved performance of making requests when there are no auto headers to skip -- by :user:`bdraco`. diff --git a/CHANGES/8858.bugfix.rst b/CHANGES/8858.bugfix.rst new file mode 100644 index 00000000000..e4efa91a2fd --- /dev/null +++ b/CHANGES/8858.bugfix.rst @@ -0,0 +1 @@ +Stopped adding a default Content-Type header when response has no content -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8875.bugfix.rst b/CHANGES/8875.bugfix.rst new file mode 100644 index 00000000000..fa33df05ae2 --- /dev/null +++ b/CHANGES/8875.bugfix.rst @@ -0,0 +1 @@ +Fixed an unclosed transport ``ResourceWarning`` on web handlers -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8876.bugfix.rst b/CHANGES/8876.bugfix.rst new file mode 100644 index 00000000000..539eeb4c7d3 --- /dev/null +++ b/CHANGES/8876.bugfix.rst @@ -0,0 +1 @@ +Fixed error handling after 100-continue so server sends 500 response instead of disconnecting -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8878.bugfix.rst b/CHANGES/8878.bugfix.rst new file mode 100644 index 00000000000..df53dea3c35 --- /dev/null +++ b/CHANGES/8878.bugfix.rst @@ -0,0 +1 @@ +Fixed response reading from closed session to throw an error immediately instead of timing out -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8898.bugfix.rst b/CHANGES/8898.bugfix.rst new file mode 100644 index 00000000000..0de6646c8cb --- /dev/null +++ b/CHANGES/8898.bugfix.rst @@ -0,0 +1 @@ +Fixed web router not matching pre-encoded URLs (requires yarl 1.9.6+) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8908.bugfix.rst b/CHANGES/8908.bugfix.rst new file mode 100644 index 00000000000..0eb450431db --- /dev/null +++ b/CHANGES/8908.bugfix.rst @@ -0,0 +1 @@ +Fixed ``CancelledError`` from one cleanup context stopping other contexts from completing -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8920.misc.rst b/CHANGES/8920.misc.rst new file mode 100644 index 00000000000..2e8640593a4 --- /dev/null +++ b/CHANGES/8920.misc.rst @@ -0,0 +1 @@ +Enabled keep-alive support on proxies (which was originally disabled several years ago) -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8929.bugfix.rst b/CHANGES/8929.bugfix.rst new file mode 100644 index 00000000000..229d5abd0e7 --- /dev/null +++ b/CHANGES/8929.bugfix.rst @@ -0,0 +1 @@ +Fixed ``Site.name`` when host is an empty string -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8947.misc.rst b/CHANGES/8947.misc.rst new file mode 100644 index 00000000000..277ba915c50 --- /dev/null +++ b/CHANGES/8947.misc.rst @@ -0,0 +1 @@ +Exported ``aiohttp.TraceRequestHeadersSentParams`` -- by :user:`Hadock-is-ok`. diff --git a/CHANGES/8956.feature.rst b/CHANGES/8956.feature.rst new file mode 100644 index 00000000000..245b481089a --- /dev/null +++ b/CHANGES/8956.feature.rst @@ -0,0 +1 @@ +Added ``secure``/``httponly``/``samesite`` parameters to ``.del_cookie()`` -- by :user:`Dreamsorcerer`. 
diff --git a/CHANGES/8966.feature.rst b/CHANGES/8966.feature.rst new file mode 100644 index 00000000000..ab1dc45b60e --- /dev/null +++ b/CHANGES/8966.feature.rst @@ -0,0 +1 @@ +Updated ClientSession's auth logic to include default auth only if the request URL's origin matches _base_url; otherwise, the auth will not be included -- by :user:`MaximZemskov` diff --git a/CHANGES/8967.bugfix.rst b/CHANGES/8967.bugfix.rst new file mode 100644 index 00000000000..1046f36bd8b --- /dev/null +++ b/CHANGES/8967.bugfix.rst @@ -0,0 +1 @@ +Fixed resolve_host() 'Task was destroyed but is pending' errors -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8968.doc.rst b/CHANGES/8968.doc.rst new file mode 100644 index 00000000000..3420794586f --- /dev/null +++ b/CHANGES/8968.doc.rst @@ -0,0 +1 @@ +Clarified which timeout exceptions happen on which timeouts -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8990.bugfix.rst b/CHANGES/8990.bugfix.rst new file mode 100644 index 00000000000..9a9783103fd --- /dev/null +++ b/CHANGES/8990.bugfix.rst @@ -0,0 +1 @@ +Fixed changing scheme/host in ``Response.clone()`` for absolute URLs -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8991.doc.rst b/CHANGES/8991.doc.rst new file mode 100644 index 00000000000..c29850c4f3c --- /dev/null +++ b/CHANGES/8991.doc.rst @@ -0,0 +1 @@ +Updated ``ClientSession`` parameters to match current code -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/8992.bugfix.rst b/CHANGES/8992.bugfix.rst new file mode 100644 index 00000000000..bc41d5feb81 --- /dev/null +++ b/CHANGES/8992.bugfix.rst @@ -0,0 +1 @@ +Fixed client incorrectly reusing a connection when the previous message had not been fully sent -- by :user:`Dreamsorcerer`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 202193375dd..e7214dfedd4 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -74,6 +74,7 @@ Chih-Yuan Chen Chris AtLee Chris Laws Chris Moore +Chris Shucksmith Christopher Schmitt Claudiu Popa Colin Dunklau @@ -273,6 +274,7 @@ Philipp A. 
Pieter van Beek Qiao Han Rafael Viotti +Rahul Nahata Raphael Bialon Raúl Cumplido Required Field @@ -352,6 +354,7 @@ William Grzybowski William S. Wilson Ong wouter bolsterlee +Xavier Halloran Xiang Li Yang Zhou Yannick Koechlin diff --git a/Makefile b/Makefile index bb2d437a134..2a40be049ee 100644 --- a/Makefile +++ b/Makefile @@ -112,11 +112,7 @@ define run_tests_in_docker docker run --rm -ti -v `pwd`:/src -w /src "aiohttp-test-$(1)-$(2)" $(TEST_SPEC) endef -.PHONY: test-3.8-no-extensions test-3.8 test-3.9-no-extensions test -test-3.8-no-extensions: - $(call run_tests_in_docker,3.8,y) -test-3.8: - $(call run_tests_in_docker,3.8,n) +.PHONY: test-3.9-no-extensions test test-3.9-no-extensions: $(call run_tests_in_docker,3.9,y) test-3.9: diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 79f40d6f8f3..15602a7dc85 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -21,6 +21,7 @@ ClientSSLError, ClientTimeout, ClientWebSocketResponse, + ClientWSTimeout, ConnectionTimeoutError, ContentTypeError, Fingerprint, @@ -106,6 +107,7 @@ TraceRequestChunkSentParams as TraceRequestChunkSentParams, TraceRequestEndParams as TraceRequestEndParams, TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, TraceRequestRedirectParams as TraceRequestRedirectParams, TraceRequestStartParams as TraceRequestStartParams, TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, @@ -138,6 +140,7 @@ "ClientSession", "ClientTimeout", "ClientWebSocketResponse", + "ClientWSTimeout", "ConnectionTimeoutError", "ContentTypeError", "Fingerprint", @@ -224,6 +227,7 @@ "TraceRequestChunkSentParams", "TraceRequestEndParams", "TraceRequestExceptionParams", + "TraceRequestHeadersSentParams", "TraceRequestRedirectParams", "TraceRequestStartParams", "TraceResponseChunkReceivedParams", diff --git a/aiohttp/client.py b/aiohttp/client.py index 3d1045f355a..3c4a0f97c04 100644 --- a/aiohttp/client.py +++ 
b/aiohttp/client.py @@ -73,7 +73,11 @@ RequestInfo as RequestInfo, _merge_ssl_params, ) -from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse +from .client_ws import ( + DEFAULT_WS_CLIENT_TIMEOUT, + ClientWebSocketResponse as ClientWebSocketResponse, + ClientWSTimeout as ClientWSTimeout, +) from .connector import ( HTTP_AND_EMPTY_SCHEMA_SET, BaseConnector as BaseConnector, @@ -142,6 +146,7 @@ # client "ClientSession", "ClientTimeout", + "ClientWSTimeout", "request", ) @@ -305,7 +310,7 @@ def __init__( self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: warnings.warn( - "read_timeout is deprecated, " "use timeout argument instead", + "read_timeout is deprecated, use timeout argument instead", DeprecationWarning, stacklevel=2, ) @@ -313,7 +318,7 @@ def __init__( if conn_timeout is not None: self._timeout = attr.evolve(self._timeout, connect=conn_timeout) warnings.warn( - "conn_timeout is deprecated, " "use timeout argument instead", + "conn_timeout is deprecated, use timeout argument instead", DeprecationWarning, stacklevel=2, ) @@ -592,7 +597,10 @@ async def _request( if auth is None: auth = auth_from_url - if auth is None: + + if auth is None and ( + not self._base_url or self._base_url.origin() == url.origin() + ): auth = self._default_auth # It would be confusing if we support explicit # Authorization header with auth argument @@ -627,7 +635,7 @@ async def _request( url, params=params, headers=headers, - skip_auto_headers=skip_headers, + skip_auto_headers=skip_headers if skip_headers else None, data=data, cookies=all_cookies, auth=auth, @@ -820,7 +828,7 @@ def ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: float = 10.0, + timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, @@ -872,7 +880,7 @@ async def _ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: float = 10.0, 
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, @@ -891,6 +899,29 @@ async def _ws_connect( compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, ) -> ClientWebSocketResponse: + if timeout is not sentinel: + if isinstance(timeout, ClientWSTimeout): + ws_timeout = timeout + else: + warnings.warn( + "parameter 'timeout' of type 'float' " + "is deprecated, please use " + "'timeout=ClientWSTimeout(ws_close=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = ClientWSTimeout(ws_close=timeout) + else: + ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT + if receive_timeout is not None: + warnings.warn( + "float parameter 'receive_timeout' " + "is deprecated, please use parameter " + "'timeout=ClientWSTimeout(ws_receive=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout) if headers is None: real_headers: CIMultiDict[str] = CIMultiDict() @@ -1021,12 +1052,13 @@ async def _ws_connect( # For WS connection the read_timeout must be either receive_timeout or greater # None == no timeout, i.e. 
infinite timeout, so None is the max timeout possible - if receive_timeout is None: + if ws_timeout.ws_receive is None: # Reset regardless - conn_proto.read_timeout = receive_timeout + conn_proto.read_timeout = None elif conn_proto.read_timeout is not None: - # If read_timeout was set check which wins - conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout) + conn_proto.read_timeout = max( + ws_timeout.ws_receive, conn_proto.read_timeout + ) transport = conn.transport assert transport is not None @@ -1050,11 +1082,10 @@ async def _ws_connect( writer, protocol, resp, - timeout, + ws_timeout, autoclose, autoping, self._loop, - receive_timeout=receive_timeout, heartbeat=heartbeat, compress=compress, client_notakeover=notakeover, @@ -1224,7 +1255,7 @@ def requote_redirect_url(self) -> bool: def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn( - "session.requote_redirect_url modification " "is deprecated #2778", + "session.requote_redirect_url modification is deprecated #2778", DeprecationWarning, stacklevel=2, ) diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index ff29b3d3ca9..36bb6d1c0d8 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -4,8 +4,10 @@ import warnings from typing import TYPE_CHECKING, Optional, Tuple, Union +from multidict import MultiMapping + from .http_parser import RawResponseMessage -from .typedefs import LooseHeaders, StrOrURL +from .typedefs import StrOrURL try: import ssl @@ -71,7 +73,7 @@ def __init__( code: Optional[int] = None, status: Optional[int] = None, message: str = "", - headers: Optional[LooseHeaders] = None, + headers: Optional[MultiMapping[str]] = None, ) -> None: self.request_info = request_info if code is not None: diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index f8c83240209..e612450c746 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -62,9 +62,6 @@ def 
should_close(self) -> bool: or bool(self._tail) ) - def force_close(self) -> None: - self._should_close = True - def close(self) -> None: transport = self.transport if transport is not None: diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index bea76d84c39..d7d5f63ec18 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -127,9 +127,7 @@ def __init__(self, fingerprint: bytes) -> None: if not hashfunc: raise ValueError("fingerprint has invalid length") elif hashfunc is md5 or hashfunc is sha1: - raise ValueError( - "md5 and sha1 are insecure and " "not supported. Use sha256." - ) + raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.") self._hashfunc = hashfunc self._fingerprint = fingerprint @@ -190,7 +188,7 @@ def _merge_ssl_params( ssl = ssl_context if fingerprint is not None: warnings.warn( - "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead", + "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead", DeprecationWarning, stacklevel=3, ) @@ -209,7 +207,7 @@ def _merge_ssl_params( return ssl -@attr.s(auto_attribs=True, slots=True, frozen=True) +@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True) class ConnectionKey: # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool @@ -265,7 +263,7 @@ def __init__( *, params: Optional[Mapping[str, str]] = None, headers: Optional[LooseHeaders] = None, - skip_auto_headers: Iterable[str] = frozenset(), + skip_auto_headers: Optional[Iterable[str]] = None, data: Any = None, cookies: Optional[LooseCookies] = None, auth: Optional[BasicAuth] = None, @@ -408,8 +406,8 @@ def update_host(self, url: URL) -> None: # basic auth info username, password = url.user, url.password - if username: - self.auth = helpers.BasicAuth(username, password or "") + if username or password: + self.auth = helpers.BasicAuth(username or "", password or "") def update_version(self, version: 
Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. @@ -451,12 +449,18 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: else: self.headers.add(key, value) - def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None: - self.skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self.skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type] + else: + # Fast path when there are no headers to skip + # which is the most common case. + self.skip_auto_headers = CIMultiDict() + used_headers = self.headers for hdr, val in self.DEFAULT_HEADERS.items(): if hdr not in used_headers: @@ -499,7 +503,7 @@ def update_content_encoding(self, data: Any) -> None: if enc: if self.compress: raise ValueError( - "compress can not be set " "if Content-Encoding header is set" + "compress can not be set if Content-Encoding header is set" ) elif self.compress: if not isinstance(self.compress, str): @@ -521,7 +525,7 @@ def update_transfer_encoding(self) -> None: elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( - "chunked can not be set " "if Content-Length header is set" + "chunked can not be set if Content-Length header is set" ) self.headers[hdrs.TRANSFER_ENCODING] = "chunked" @@ -573,9 +577,7 @@ def update_body_from_data(self, body: Any) -> None: # copy payload headers assert body.headers for key, value in body.headers.items(): - if key in self.headers: - continue - if key in self.skip_auto_headers: + if key in self.headers or key in self.skip_auto_headers: continue self.headers[key] = value @@ 
-655,7 +657,8 @@ async def write_bytes( set_exception(protocol, reraised_exc, underlying_exc) except asyncio.CancelledError: - await writer.write_eof() + # Body hasn't been fully sent, so connection can't be reused. + conn.close() except Exception as underlying_exc: set_exception( protocol, @@ -727,9 +730,8 @@ async def send(self, conn: "Connection") -> "ClientResponse": self.headers[hdrs.CONNECTION] = connection # status + headers - status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format( - self.method, path, v=self.version - ) + v = self.version + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" await writer.write_headers(status_line, self.headers) coro = self.write_bytes(writer, conn) @@ -1201,7 +1203,7 @@ async def json( self.history, status=self.status, message=( - "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype + "Attempt to decode JSON with unexpected mimetype: %s" % ctype ), headers=self.headers, ) diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index 7b3a5bf952d..6246234b8e0 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -4,6 +4,8 @@ import sys from typing import Any, Optional, cast +import attr + from .client_exceptions import ClientError, ServerTimeoutError from .client_reqrep import ClientResponse from .helpers import calculate_timeout_when, set_result @@ -30,6 +32,15 @@ import async_timeout +@attr.s(frozen=True, slots=True) +class ClientWSTimeout: + ws_receive = attr.ib(type=Optional[float], default=None) + ws_close = attr.ib(type=Optional[float], default=None) + + +DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0) + + class ClientWebSocketResponse: def __init__( self, @@ -37,12 +48,11 @@ def __init__( writer: WebSocketWriter, protocol: Optional[str], response: ClientResponse, - timeout: float, + timeout: ClientWSTimeout, autoclose: bool, autoping: bool, loop: asyncio.AbstractEventLoop, *, - receive_timeout: Optional[float] = None, heartbeat: Optional[float] = None, 
compress: int = 0, client_notakeover: bool = False, @@ -57,7 +67,6 @@ def __init__( self._closing = False self._close_code: Optional[int] = None self._timeout = timeout - self._receive_timeout = receive_timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat @@ -268,7 +277,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo while True: try: - async with async_timeout.timeout(self._timeout): + async with async_timeout.timeout(self._timeout.ws_close): msg = await self._reader.read() except asyncio.CancelledError: self._close_code = WSCloseCode.ABNORMAL_CLOSURE @@ -288,7 +297,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo return False async def receive(self, timeout: Optional[float] = None) -> WSMessage: - receive_timeout = timeout or self._receive_timeout + receive_timeout = timeout or self._timeout.ws_receive while True: if self._waiting: diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 04115c36a24..360eabc7bb2 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -243,7 +243,7 @@ def __init__( if force_close: if keepalive_timeout is not None and keepalive_timeout is not sentinel: raise ValueError( - "keepalive_timeout cannot " "be set if force_close is True" + "keepalive_timeout cannot be set if force_close is True" ) else: if keepalive_timeout is sentinel: @@ -824,12 +824,16 @@ def __init__( self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave + self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() + for t in self._resolve_host_tasks: + t.cancel() + return super().close() @property @@ -849,7 +853,7 @@ def clear_dns_cache( if host is not None and port is not None: 
self._cached_hosts.remove((host, port)) elif host is not None or port is not None: - raise ValueError("either both host and port " "or none of them are allowed") + raise ValueError("either both host and port or none of them are allowed") else: self._cached_hosts.clear() @@ -907,6 +911,8 @@ async def _resolve_host( resolved_host_task = asyncio.create_task( self._resolve_host_with_throttle(key, host, port, traces) ) + self._resolve_host_tasks.add(resolved_host_task) + resolved_host_task.add_done_callback(self._resolve_host_tasks.discard) try: return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: @@ -1376,11 +1382,6 @@ async def _create_proxy_connection( proxy_req, [], timeout, client_error=ClientProxyConnectionError ) - # Many HTTP proxies has buggy keepalive support. Let's not - # reuse connection but close it after processing every - # response. - proto.force_close() - auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None) if auth is not None: if not req.is_ssl(): @@ -1574,7 +1575,7 @@ def __init__( self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] ): raise RuntimeError( - "Named Pipes only available in proactor " "loop under windows" + "Named Pipes only available in proactor loop under windows" ) self._path = path diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index e9997ce2935..c57604b5e59 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -54,7 +54,7 @@ class CookieJar(AbstractCookieJar): DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})") DATE_MONTH_RE = re.compile( - "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)", + "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)", re.I, ) @@ -70,7 +70,7 @@ class CookieJar(AbstractCookieJar): except (OSError, ValueError): # Hit the maximum representable time on Windows # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 - # Throws ValueError on PyPy 3.8 and 3.9, 
OSError elsewhere + # Throws ValueError on PyPy 3.9, OSError elsewhere MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) except OverflowError: # #4515: datetime.max may not be representable on 32-bit platforms diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index 2b75b3de72c..39ca8539acc 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -64,9 +64,7 @@ def add_field( type_options: MultiDict[str] = MultiDict({"name": name}) if filename is not None and not isinstance(filename, str): - raise TypeError( - "filename must be an instance of str. " "Got: %s" % filename - ) + raise TypeError("filename must be an instance of str. Got: %s" % filename) if filename is None and isinstance(value, io.IOBase): filename = guess_filename(value, name) if filename is not None: @@ -77,7 +75,7 @@ def add_field( if content_type is not None: if not isinstance(content_type, str): raise TypeError( - "content_type must be an instance of str. " "Got: %s" % content_type + "content_type must be an instance of str. 
Got: %s" % content_type ) headers[hdrs.CONTENT_TYPE] = content_type self._is_multipart = True @@ -131,7 +129,7 @@ def _gen_form_urlencoded(self) -> payload.BytesPayload: if charset == "utf-8": content_type = "application/x-www-form-urlencoded" else: - content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset + content_type = "application/x-www-form-urlencoded; charset=%s" % charset return payload.BytesPayload( urlencode(data, doseq=True, encoding=charset).encode(), diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ccfa9d5e2fe..bf9e135bb3c 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -164,9 +164,9 @@ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth" """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") - if url.user is None: + if url.user is None and url.password is None: return None - return cls(url.user, url.password or "", encoding=encoding) + return cls(url.user or "", url.password or "", encoding=encoding) def encode(self) -> str: """Encode credentials.""" @@ -393,16 +393,14 @@ def content_disposition_header( params is a dict with disposition params. 
""" if not disptype or not (TOKEN > set(disptype)): - raise ValueError("bad content disposition type {!r}" "".format(disptype)) + raise ValueError(f"bad content disposition type {disptype!r}") value = disptype if params: lparams = [] for key, val in params.items(): if not key or not (TOKEN > set(key)): - raise ValueError( - "bad content disposition parameter" " {!r}={!r}".format(key, val) - ) + raise ValueError(f"bad content disposition parameter {key!r}={val!r}") if quote_fields: if key.lower() == "filename": qval = quote(val, "", encoding=_charset) @@ -690,9 +688,7 @@ def __enter__(self) -> BaseTimerContext: task = asyncio.current_task(loop=self._loop) if task is None: - raise RuntimeError( - "Timeout context manager should be used " "inside a task" - ) + raise RuntimeError("Timeout context manager should be used inside a task") if self._cancelled: raise asyncio.TimeoutError from None @@ -762,7 +758,8 @@ def content_type(self) -> str: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_type # type: ignore[return-value] + assert self._content_type is not None + return self._content_type @property def charset(self) -> Optional[str]: @@ -770,7 +767,8 @@ def charset(self) -> Optional[str]: raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: self._parse_content_type(raw) - return self._content_dict.get("charset") # type: ignore[union-attr] + assert self._content_dict is not None + return self._content_dict.get("charset") @property def content_length(self) -> Optional[int]: diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index b992955a011..f46cf833c03 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -277,8 +277,10 @@ def __init__( ) @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: - pass + def parse_message(self, lines: List[bytes]) -> _MsgT: ... 
+ + @abc.abstractmethod + def _is_chunked_te(self, te: str) -> bool: ... def feed_eof(self) -> Optional[_MsgT]: if self._payload_parser is not None: @@ -537,10 +539,8 @@ def parse_headers( # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te is not None: - if "chunked" == te.lower(): + if self._is_chunked_te(te): chunked = True - else: - raise BadHttpMessage("Request has invalid `Transfer-Encoding`") if hdrs.CONTENT_LENGTH in headers: raise BadHttpMessage( @@ -650,6 +650,12 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: url, ) + def _is_chunked_te(self, te: str) -> bool: + if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + class HttpResponseParser(HttpParser[RawResponseMessage]): """Read response status line and headers. @@ -735,6 +741,10 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: chunked, ) + def _is_chunked_te(self, te: str) -> bool: + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" + class HttpPayloadParser: def __init__( diff --git a/aiohttp/http_websocket.py b/aiohttp/http_websocket.py index db0cb429d83..9d03d2773c7 100644 --- a/aiohttp/http_websocket.py +++ b/aiohttp/http_websocket.py @@ -115,6 +115,7 @@ class WSMsgType(IntEnum): PACK_RANDBITS = Struct("!L").pack MSG_SIZE: Final[int] = 2**14 DEFAULT_LIMIT: Final[int] = 2**16 +MASK_LEN: Final[int] = 4 class WSMessage(NamedTuple): @@ -266,7 +267,7 @@ def ws_ext_gen( # compress wbit 8 does not support in zlib if compress < 9 or compress > 15: raise ValueError( - "Compress wbits must between 9 and 15, " "zlib does not support wbits=8" + "Compress wbits must between 9 and 15, zlib does not support wbits=8" ) enabledext = ["permessage-deflate"] if not isserver: @@ -511,7 +512,7 @@ def parse_frame( if opcode > 0x7 and 
length > 125: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, - "Control frame payload cannot be " "larger than 125 bytes", + "Control frame payload cannot be larger than 125 bytes", ) # Set compress status if last package is FIN @@ -625,12 +626,18 @@ async def _send_frame( if self._closing and not (opcode & WSMsgType.CLOSE): raise ConnectionResetError("Cannot write to closing transport") + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 rsv = 0 - # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: + # RSV1 (rsv = 0x40) is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + rsv = 0x40 + if compress: # Do not set self._compress if compressing is for this frame compressobj = self._make_compress_obj(compress) @@ -649,28 +656,39 @@ async def _send_frame( ) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] - rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask - if use_mask: - mask_bit = 0x80 - else: - mask_bit = 0 + mask_bit = 0x80 if use_mask else 0 + # Depending on the message length, the header is assembled differently. + # The first byte is reserved for the opcode and the RSV bits. 
+ first_byte = 0x80 | rsv | opcode if msg_length < 126: - header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) + header = PACK_LEN1(first_byte, msg_length | mask_bit) + header_len = 2 elif msg_length < (1 << 16): - header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) + header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length) + header_len = 4 else: - header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) + header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length) + header_len = 10 + + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3 + # If we are using a mask, we need to generate it randomly + # and apply it to the message before sending it. A mask is + # a 32-bit value that is applied to the message using a + # bitwise XOR operation. It is used to prevent certain types + # of attacks on the websocket protocol. The mask is only used + # when aiohttp is acting as a client. Servers do not use a mask. if use_mask: mask = PACK_RANDBITS(self.get_random_bits()) message = bytearray(message) _websocket_mask(mask, message) self._write(header + mask + message) - self._output_size += len(header) + len(mask) + msg_length + self._output_size += header_len + MASK_LEN + msg_length + else: if msg_length > MSG_SIZE: self._write(header) @@ -678,11 +696,16 @@ async def _send_frame( else: self._write(header + message) - self._output_size += len(header) + msg_length + self._output_size += header_len + msg_length # It is safe to return control to the event loop when using compression # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. 
if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper() diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index e3680a7b2a1..e0bcce07449 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -530,9 +530,7 @@ def _decode_content_transfer(self, data: bytes) -> bytes: elif encoding in ("binary", "8bit", "7bit"): return data else: - raise RuntimeError( - "unknown content transfer encoding: {}" "".format(encoding) - ) + raise RuntimeError(f"unknown content transfer encoding: {encoding}") def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" @@ -561,6 +559,8 @@ def filename(self) -> Optional[str]: @payload_type(BodyPartReader, order=Order.try_first) class BodyPartReaderPayload(Payload): + _value: BodyPartReader + def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) @@ -573,6 +573,9 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: if params: self.set_content_disposition("attachment", True, **params) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + async def write(self, writer: Any) -> None: field = self._value chunk = await field.read_chunk(size=2**16) @@ -790,6 +793,8 @@ async def _maybe_release_last_part(self) -> None: class MultipartWriter(Payload): """Multipart body writer.""" + _value: None + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, @@ -970,6 +975,16 @@ def size(self) -> Optional[int]: total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join( + "--" + + self.boundary + + "\n" + + 
part._binary_headers.decode(encoding, errors) + + part.decode() + for part, _e, _te in self._parts + ) + async def write(self, writer: Any, close_boundary: bool = True) -> None: """Write body.""" for part, encoding, te_encoding in self._parts: diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 5271393612a..27636977774 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -207,6 +207,13 @@ def set_content_disposition( disptype, quote_fields=quote_fields, _charset=_charset, **params ) + @abstractmethod + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the value. + + This is named decode() to allow compatibility with bytes objects. + """ + @abstractmethod async def write(self, writer: AbstractStreamWriter) -> None: """Write payload. @@ -216,6 +223,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesPayload(Payload): + _value: bytes + def __init__( self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any ) -> None: @@ -242,6 +251,9 @@ def __init__( **kwargs, ) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.decode(encoding, errors) + async def write(self, writer: AbstractStreamWriter) -> None: await writer.write(self._value) @@ -283,7 +295,7 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): - _value: IO[Any] + _value: io.IOBase def __init__( self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any @@ -307,9 +319,12 @@ async def write(self, writer: AbstractStreamWriter) -> None: finally: await loop.run_in_executor(None, self._value.close) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + class TextIOPayload(IOBasePayload): - _value: TextIO + _value: io.TextIOBase def __init__( self, @@ -346,6 +361,9 @@ def size(self) -> 
Optional[int]: except OSError: return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read() + async def write(self, writer: AbstractStreamWriter) -> None: loop = asyncio.get_event_loop() try: @@ -363,6 +381,8 @@ async def write(self, writer: AbstractStreamWriter) -> None: class BytesIOPayload(IOBasePayload): + _value: io.BytesIO + @property def size(self) -> int: position = self._value.tell() @@ -370,17 +390,27 @@ def size(self) -> int: self._value.seek(position) return end - position + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase + @property def size(self) -> Optional[int]: try: return os.fstat(self._value.fileno()).st_size - self._value.tell() - except OSError: + except (OSError, AttributeError): # data.fileno() is not supported, e.g. # io.BufferedReader(io.BytesIO(b'data')) + # For some file-like objects (e.g. tarfile), the fileno() attribute may + # not exist at all, and will instead raise an AttributeError. 
return None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + class JsonPayload(BytesPayload): def __init__( @@ -417,6 +447,7 @@ def __init__( class AsyncIterablePayload(Payload): _iter: Optional[_AsyncIterator] = None + _value: _AsyncIterable def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: if not isinstance(value, AsyncIterable): @@ -444,6 +475,9 @@ async def write(self, writer: AbstractStreamWriter) -> None: except StopAsyncIteration: self._iter = None + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + class StreamReaderPayload(AsyncIterablePayload): def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: diff --git a/aiohttp/payload_streamer.py b/aiohttp/payload_streamer.py index 364f763ae74..831fdc0a77f 100644 --- a/aiohttp/payload_streamer.py +++ b/aiohttp/payload_streamer.py @@ -65,6 +65,9 @@ class StreamWrapperPayload(Payload): async def write(self, writer: AbstractStreamWriter) -> None: await self._value(writer) + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + @payload_type(streamer) class StreamPayload(StreamWrapperPayload): diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index 10e36266abe..c8fce5b5706 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,6 +1,5 @@ import asyncio import socket -import sys from typing import Any, Dict, List, Optional, Tuple, Type, Union from .abc import AbstractResolver, ResolveResult @@ -18,7 +17,6 @@ _NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV -_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) class ThreadedResolver(AbstractResolver): @@ -49,7 +47,7 @@ async def resolve( # IPv6 is not supported by Python build, # or IPv6 is not enabled in the host continue - if address[3] and _SUPPORTS_SCOPE_ID: + if address[3]: # 
This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. @@ -116,7 +114,7 @@ async def resolve( address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr family = node.family if family == socket.AF_INET6: - if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: + if len(address) > 3 and address[3]: # This is essential for link-local IPv6 addresses. # LL IPv6 is a VERY rare case. Strictly speaking, we should use # getnameinfo() unconditionally, but performance makes sense. diff --git a/aiohttp/streams.py b/aiohttp/streams.py index b9b9c3fd96f..1ed78ce5db0 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -261,7 +261,7 @@ def begin_http_chunk_receiving(self) -> None: if self._http_chunk_splits is None: if self.total_bytes: raise RuntimeError( - "Called begin_http_chunk_receiving when" "some data was already fed" + "Called begin_http_chunk_receiving when some data was already fed" ) self._http_chunk_splits = [] @@ -296,6 +296,9 @@ def end_http_chunk_receiving(self) -> None: set_result(waiter, None) async def _wait(self, func_name: str) -> None: + if not self._protocol.connected: + raise RuntimeError("Connection closed.") + # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. 
It would not possible to know diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 97c1469dd2a..13b6f4d9c50 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -31,6 +31,7 @@ from .client_ws import ClientWebSocketResponse from .helpers import sentinel from .http import HttpVersion, RawRequestMessage +from .streams import EMPTY_PAYLOAD, StreamReader from .typedefs import StrOrURL from .web import ( Application, @@ -265,7 +266,7 @@ def __init__( ) -> None: if not isinstance(server, BaseTestServer): raise TypeError( - "server must be TestServer " "instance, found type: %r" % type(server) + "server must be TestServer instance, found type: %r" % type(server) ) self._server = server self._loop = loop @@ -631,7 +632,7 @@ def make_mocked_request( writer: Any = sentinel, protocol: Any = sentinel, transport: Any = sentinel, - payload: Any = sentinel, + payload: StreamReader = EMPTY_PAYLOAD, sslcontext: Optional[SSLContext] = None, client_max_size: int = 1024**2, loop: Any = ..., @@ -700,9 +701,6 @@ def make_mocked_request( protocol.transport = transport protocol.writer = writer - if payload is sentinel: - payload = mock.Mock() - req = Request( message, payload, protocol, writer, task, loop, client_max_size=client_max_size ) diff --git a/aiohttp/web.py b/aiohttp/web.py index 8708f1fcbec..1d18691f401 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -6,6 +6,7 @@ import warnings from argparse import ArgumentParser from collections.abc import Iterable +from contextlib import suppress from importlib import import_module from typing import ( Any, @@ -519,10 +520,14 @@ def run_app( except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: - _cancel_tasks({main_task}, loop) - _cancel_tasks(asyncio.all_tasks(loop), loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - loop.close() + try: + main_task.cancel() + with suppress(asyncio.CancelledError): + loop.run_until_complete(main_task) + finally: + 
_cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() def main(argv: List[str]) -> None: @@ -576,7 +581,7 @@ def main(argv: List[str]) -> None: # Compatibility logic if args.path is not None and not hasattr(socket, "AF_UNIX"): arg_parser.error( - "file system paths not supported by your operating" " environment" + "file system paths not supported by your operating environment" ) logging.basicConfig(level=logging.DEBUG) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 3b4b6489e60..8403bbbc826 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -195,7 +195,7 @@ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: def _check_frozen(self) -> None: if self._frozen: warnings.warn( - "Changing state of started or joined " "application is deprecated", + "Changing state of started or joined application is deprecated", DeprecationWarning, stacklevel=3, ) @@ -433,7 +433,7 @@ def make_handler( ) -> Server: warnings.warn( - "Application.make_handler(...) is deprecated, " "use AppRunner API instead", + "Application.make_handler(...) 
is deprecated, use AppRunner API instead", DeprecationWarning, stacklevel=2, ) @@ -492,7 +492,7 @@ def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: yield m, True else: warnings.warn( - 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m), + f'old-style middleware "{m!r}" deprecated, see #2252', DeprecationWarning, stacklevel=2, ) @@ -579,7 +579,7 @@ async def _on_cleanup(self, app: Application) -> None: await it.__anext__() except StopAsyncIteration: pass - except Exception as exc: + except (Exception, asyncio.CancelledError) as exc: errors.append(exc) else: errors.append(RuntimeError(f"{it!r} has more than one 'yield'")) diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 0c23e375d25..2c253e03b0a 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -1,7 +1,6 @@ import asyncio import os import pathlib -import sys from contextlib import suppress from mimetypes import MimeTypes from stat import S_ISREG @@ -48,9 +47,6 @@ CONTENT_TYPES: Final[MimeTypes] = MimeTypes() -if sys.version_info < (3, 9): - CONTENT_TYPES.encodings_map[".br"] = "br" - # File extension to IANA encodings map that will be checked in the order defined. 
ENCODING_EXTENSIONS = MappingProxyType( {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index 39e1c8be50e..a2f159c3b7c 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -38,7 +38,7 @@ from .log import access_logger, server_logger from .streams import EMPTY_PAYLOAD, StreamReader from .tcp_helpers import tcp_keepalive -from .web_exceptions import HTTPException +from .web_exceptions import HTTPException, HTTPInternalServerError from .web_log import AccessLogger from .web_request import BaseRequest from .web_response import Response, StreamResponse @@ -260,9 +260,6 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._keepalive_handle is not None: self._keepalive_handle.cancel() - if self._waiter: - self._waiter.cancel() - # Wait for graceful handler completion if self._handler_waiter is not None: with suppress(asyncio.CancelledError, asyncio.TimeoutError): @@ -281,9 +278,7 @@ async def shutdown(self, timeout: Optional[float] = 15.0) -> None: if self._task_handler is not None: self._task_handler.cancel() - if self.transport is not None: - self.transport.close() - self.transport = None + self.force_close() def connection_made(self, transport: asyncio.BaseTransport) -> None: super().connection_made(transport) @@ -307,13 +302,12 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: return self._manager.connection_lost(self, exc) - super().connection_lost(exc) - # Grab value before setting _manager to None. 
handler_cancellation = self._manager.handler_cancellation + self.force_close() + super().connection_lost(exc) self._manager = None - self._force_close = True self._request_factory = None self._request_handler = None self._request_parser = None @@ -326,9 +320,6 @@ def connection_lost(self, exc: Optional[BaseException]) -> None: exc = ConnectionResetError("Connection lost") self._current_request._cancel(exc) - if self._waiter is not None: - self._waiter.cancel() - if handler_cancellation and self._task_handler is not None: self._task_handler.cancel() @@ -464,16 +455,16 @@ async def _handle_request( self._current_request = None except HTTPException as exc: resp = exc - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except asyncio.CancelledError: raise except asyncio.TimeoutError as exc: self.log_debug("Request handler timed out.", exc_info=exc) resp = self.handle_error(request, 504) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) except Exception as exc: resp = self.handle_error(request, 500, exc) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) else: # Deprecation warning (See #2415) if getattr(resp, "__http_exception__", False): @@ -484,7 +475,7 @@ async def _handle_request( DeprecationWarning, ) - reset = await self.finish_response(request, resp, start_time) + resp, reset = await self.finish_response(request, resp, start_time) finally: self._handler_waiter.set_result(None) @@ -584,10 +575,6 @@ async def start(self) -> None: except asyncio.CancelledError: self.log_debug("Ignored premature client disconnection ") break - except RuntimeError as exc: - if self.debug: - self.log_exception("Unhandled runtime exception", exc_info=exc) - self.force_close() except Exception as exc: self.log_exception("Unhandled exception", 
exc_info=exc) self.force_close() @@ -616,7 +603,7 @@ async def start(self) -> None: async def finish_response( self, request: BaseRequest, resp: StreamResponse, start_time: float - ) -> bool: + ) -> Tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. This has to @@ -635,22 +622,26 @@ async def finish_response( prepare_meth = resp.prepare except AttributeError: if resp is None: - raise RuntimeError("Missing return " "statement on request handler") + self.log_exception("Missing return statement on request handler") else: - raise RuntimeError( - "Web-handler should return " - "a response instance, " + self.log_exception( + "Web-handler should return a response instance, " "got {!r}".format(resp) ) + exc = HTTPInternalServerError() + resp = Response( + status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers + ) + prepare_meth = resp.prepare try: await prepare_meth(request) await resp.write_eof() except ConnectionError: self.log_access(request, resp, start_time) - return True - else: - self.log_access(request, resp, start_time) - return False + return resp, True + + self.log_access(request, resp, start_time) + return resp, False def handle_error( self, diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index a485f0dcea6..2465e6655ad 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -79,7 +79,7 @@ class FileField: filename: str file: io.BufferedReader content_type: str - headers: "CIMultiDictProxy[str]" + headers: CIMultiDictProxy[str] _TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" @@ -169,12 +169,16 @@ def __init__( self._payload_writer = payload_writer self._payload = payload - self._headers = message.headers + self._headers: CIMultiDictProxy[str] = message.headers self._method = message.method self._version = message.version self._cache: Dict[str, Any] = {} url = message.url if url.is_absolute(): + if scheme is not None: + url = url.with_scheme(scheme) + if 
host is not None: + url = url.with_host(host) # absolute URL is given, # override auto-calculating url, host, and scheme # all other properties should be good @@ -184,6 +188,10 @@ def __init__( self._rel_url = url.relative() else: self._rel_url = message.url + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None self._read_bytes: Optional[bytes] = None @@ -197,10 +205,6 @@ def __init__( self._transport_sslcontext = transport.get_extra_info("sslcontext") self._transport_peername = transport.get_extra_info("peername") - if scheme is not None: - self._cache["scheme"] = scheme - if host is not None: - self._cache["host"] = host if remote is not None: self._cache["remote"] = remote @@ -222,7 +226,7 @@ def clone( will reuse the one from the current request object. """ if self._read_bytes: - raise RuntimeError("Cannot clone request " "after reading its content") + raise RuntimeError("Cannot clone request after reading its content") dct: Dict[str, Any] = {} if method is not sentinel: @@ -493,7 +497,7 @@ def query_string(self) -> str: return self._rel_url.query_string @reify - def headers(self) -> "MultiMapping[str]": + def headers(self) -> CIMultiDictProxy[str]: """A case-insensitive multidict proxy with all headers.""" return self._headers @@ -769,7 +773,7 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": ) else: raise ValueError( - "To decode nested multipart you need " "to use custom reader", + "To decode nested multipart you need to use custom reader", ) field = await multipart.next() diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index 78d3fe32949..24ea9f5b46b 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -72,6 +72,8 @@ class StreamResponse(BaseClass, HeadersMixin): _length_check = True + _body: Union[None, bytes, bytearray, Payload] + def __init__( self, *, @@ -102,7 +104,7 @@ 
def __init__( @property def prepared(self) -> bool: - return self._payload_writer is not None + return self._eof_sent or self._payload_writer is not None @property def task(self) -> "Optional[asyncio.Task[None]]": @@ -132,9 +134,9 @@ def set_status( status: int, reason: Optional[str] = None, ) -> None: - assert not self.prepared, ( - "Cannot change the response status code after " "the headers have been sent" - ) + assert ( + not self.prepared + ), "Cannot change the response status code after the headers have been sent" self._status = int(status) if reason is None: try: @@ -166,7 +168,7 @@ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError( - "You can't enable chunked encoding when " "a content length is set" + "You can't enable chunked encoding when a content length is set" ) if chunk_size is not None: warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) @@ -182,9 +184,9 @@ def enable_compression( "Using boolean for force is deprecated #3318", DeprecationWarning ) elif force is not None: - assert isinstance(force, ContentCoding), ( - "force should one of " "None, bool or " "ContentEncoding" - ) + assert isinstance( + force, ContentCoding + ), "force should one of None, bool or ContentEncoding" self._compression = True self._compression_force = force @@ -249,7 +251,14 @@ def set_cookie( c["samesite"] = samesite def del_cookie( - self, name: str, *, domain: Optional[str] = None, path: str = "/" + self, + name: str, + *, + domain: Optional[str] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + samesite: Optional[str] = None, ) -> None: """Delete cookie. 
@@ -264,6 +273,9 @@ def del_cookie( expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path, + secure=secure, + httponly=httponly, + samesite=samesite, ) @property @@ -277,7 +289,7 @@ def content_length(self, value: Optional[int]) -> None: value = int(value) if self._chunked: raise RuntimeError( - "You can't set content length when " "chunked encoding is enable" + "You can't set content length when chunked encoding is enable" ) self._headers[hdrs.CONTENT_LENGTH] = str(value) else: @@ -479,7 +491,8 @@ async def _prepare_headers(self) -> None: # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 if hdrs.TRANSFER_ENCODING in headers: del headers[hdrs.TRANSFER_ENCODING] - else: + elif self.content_length != 0: + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") headers.setdefault(hdrs.DATE, rfc822_formatted_time()) headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) @@ -500,9 +513,7 @@ async def _write_headers(self) -> None: assert writer is not None # status line version = request.version - status_line = "HTTP/{}.{} {} {}".format( - version[0], version[1], self._status, self._reason - ) + status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" await writer.write_headers(status_line, self._headers) async def write(self, data: bytes) -> None: @@ -600,7 +611,7 @@ def __init__( real_headers = headers # = cast('CIMultiDict[str]', headers) if content_type is not None and "charset" in content_type: - raise ValueError("charset must not be in content_type " "argument") + raise ValueError("charset must not be in content_type argument") if text is not None: if hdrs.CONTENT_TYPE in real_headers: @@ -651,21 +662,17 @@ def body(self) -> Optional[Union[bytes, Payload]]: return self._body @body.setter - def body(self, body: bytes) -> None: + def body(self, body: Any) -> None: if body is None: - self._body: Optional[bytes] = None - self._body_payload: bool = False + self._body 
= None elif isinstance(body, (bytes, bytearray)): self._body = body - self._body_payload = False else: try: self._body = body = payload.PAYLOAD_REGISTRY.get(body) except payload.LookupError: raise ValueError("Unsupported body type %r" % type(body)) - self._body_payload = True - headers = self._headers # set content-type @@ -698,7 +705,6 @@ def text(self, text: str) -> None: self.charset = "utf-8" self._body = text.encode(self.charset) - self._body_payload = False self._compressed_body = None @property @@ -712,7 +718,7 @@ def content_length(self) -> Optional[int]: if self._compressed_body is not None: # Return length of the compressed body return len(self._compressed_body) - elif self._body_payload: + elif isinstance(self._body, Payload): # A payload without content length, or a compressed payload return None elif self._body is not None: @@ -737,9 +743,8 @@ async def write_eof(self, data: bytes = b"") -> None: if body is not None: if self._must_be_empty_body: await super().write_eof() - elif self._body_payload: - payload = cast(Payload, body) - await payload.write(self._payload_writer) + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) await super().write_eof() else: await super().write_eof(cast(bytes, body)) @@ -751,10 +756,9 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: if hdrs.CONTENT_LENGTH in self._headers: del self._headers[hdrs.CONTENT_LENGTH] elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers: - if self._body_payload: - size = cast(Payload, self._body).size - if size is not None: - self._headers[hdrs.CONTENT_LENGTH] = str(size) + if isinstance(self._body, Payload): + if self._body.size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) else: body_len = len(self._body) if self._body else "0" # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 @@ -766,7 +770,7 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: return await 
super()._start(request) async def _do_start_compression(self, coding: ContentCoding) -> None: - if self._body_payload or self._chunked: + if self._chunked or isinstance(self._body, Payload): return await super()._do_start_compression(coding) if coding != ContentCoding.identity: diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index 93802141c56..f51b6cd0081 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -66,7 +66,7 @@ def __repr__(self) -> str: info = [] for name, value in sorted(self.kwargs.items()): info.append(f", {name}={value!r}") - return " {handler.__name__!r}" "{info}>".format( + return " {handler.__name__!r}{info}>".format( method=self.method, path=self.path, handler=self.handler, info="".join(info) ) @@ -90,7 +90,7 @@ def __repr__(self) -> str: info = [] for name, value in sorted(self.kwargs.items()): info.append(f", {name}={value!r}") - return " {path}" "{info}>".format( + return " {path}{info}>".format( prefix=self.prefix, path=self.path, info="".join(info) ) diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index 2fe229c4e50..f8933383435 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -108,7 +108,7 @@ def __init__( @property def name(self) -> str: scheme = "https" if self._ssl_context else "http" - host = "0.0.0.0" if self._host is None else self._host + host = "0.0.0.0" if not self._host else self._host return str(URL.build(scheme=scheme, host=host, port=self._port)) async def start(self) -> None: @@ -176,7 +176,7 @@ def __init__( loop, asyncio.ProactorEventLoop # type: ignore[attr-defined] ): raise RuntimeError( - "Named Pipes only available in proactor" "loop under windows" + "Named Pipes only available in proactor loop under windows" ) super().__init__(runner, shutdown_timeout=shutdown_timeout) self._path = path diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 558fb7d0c9b..765f8500c0e 100644 --- a/aiohttp/web_urldispatcher.py +++ 
b/aiohttp/web_urldispatcher.py @@ -194,14 +194,14 @@ def __init__( pass elif inspect.isgeneratorfunction(handler): warnings.warn( - "Bare generators are deprecated, " "use @coroutine wrapper", + "Bare generators are deprecated, use @coroutine wrapper", DeprecationWarning, ) elif isinstance(handler, type) and issubclass(handler, AbstractView): pass else: warnings.warn( - "Bare functions are deprecated, " "use async ones", DeprecationWarning + "Bare functions are deprecated, use async ones", DeprecationWarning ) @wraps(handler) @@ -341,6 +341,8 @@ async def _default_expect_handler(request: Request) -> None: if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") + # Reset output_size as we haven't started the main body yet. + request.writer.output_size = 0 else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect) @@ -379,7 +381,7 @@ def register_route(self, route: "ResourceRoute") -> None: async def resolve(self, request: Request) -> _Resolve: allowed_methods: Set[str] = set() - match_dict = self._match(request.rel_url.raw_path) + match_dict = self._match(request.rel_url.path) if match_dict is None: return None, allowed_methods @@ -648,7 +650,7 @@ def set_options_route(self, handler: Handler) -> None: ) async def resolve(self, request: Request) -> _Resolve: - path = request.rel_url.raw_path + path = request.rel_url.path method = request.method allowed_methods = set(self._routes) if not path.startswith(self._prefix2) and path != self._prefix: @@ -775,7 +777,7 @@ def _add_prefix_to_resources(self, prefix: str) -> None: router.index_resource(resource) def url_for(self, *args: str, **kwargs: str) -> URL: - raise RuntimeError(".url_for() is not supported " "by sub-application root") + raise RuntimeError(".url_for() is not supported by sub-application root") def get_info(self) -> _InfoDict: return {"app": self._app, "prefix": self._prefix} @@ -898,7 +900,7 @@ async def resolve(self, 
request: Request) -> _Resolve: return match_info, methods def __repr__(self) -> str: - return " {app!r}>" "".format(app=self._app) + return f" {self._app!r}>" class ResourceRoute(AbstractRoute): @@ -1038,7 +1040,7 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: # candidates for a given url part because there are multiple resources # registered for the same canonical path, we resolve them in a linear # fashion to ensure registration order is respected. - url_part = request.rel_url.raw_path + url_part = request.rel_url.path while url_part: for candidate in resource_index.get(url_part, ()): match_dict, allowed = await candidate.resolve(request) @@ -1163,7 +1165,7 @@ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: if resource.name == name and resource.raw_match(path): return cast(Resource, resource) if not ("{" in path or "}" in path or ROUTE_RE.search(path)): - resource = PlainResource(_requote_path(path), name=name) + resource = PlainResource(path, name=name) self.register_resource(resource) return resource resource = DynamicResource(path, name=name) @@ -1290,7 +1292,7 @@ def _quote_path(value: str) -> str: def _unquote_path(value: str) -> str: - return URL.build(path=value, encoded=True).path + return URL.build(path=value, encoded=True).path.replace("%2F", "/") def _requote_path(value: str) -> str: diff --git a/docs/client_reference.rst b/docs/client_reference.rst index 738892c6cc6..77230a755c6 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -41,17 +41,21 @@ The client session supports the context manager protocol for self closing. 
connector=None, cookies=None, \ headers=None, skip_auto_headers=None, \ auth=None, json_serialize=json.dumps, \ + request_class=ClientRequest, \ + response_class=ClientResponse, \ + ws_response_class=ClientWebSocketResponse, \ version=aiohttp.HttpVersion11, \ - cookie_jar=None, read_timeout=None, \ - conn_timeout=None, \ - timeout=sentinel, \ - raise_for_status=False, \ + cookie_jar=None, \ connector_owner=True, \ + raise_for_status=False, \ + timeout=sentinel, \ auto_decompress=True, \ - read_bufsize=2**16, \ - requote_redirect_url=True, \ trust_env=False, \ + requote_redirect_url=True, \ trace_configs=None, \ + read_bufsize=2**16, \ + max_line_size=8190, \ + max_field_size=8190, \ fallback_charset_resolver=lambda r, b: "utf-8") The class for creating client sessions and making requests. @@ -67,17 +71,6 @@ The client session supports the context manager protocol for self closing. :param aiohttp.BaseConnector connector: BaseConnector sub-class instance to support connection pooling. - :param loop: :ref:`event loop` used for - processing HTTP requests. - - If *loop* is ``None`` the constructor - borrows it from *connector* if specified. - - :func:`asyncio.get_event_loop` is used for getting default event - loop otherwise. - - .. deprecated:: 2.0 - :param dict cookies: Cookies to send with the request (optional) :param headers: HTTP Headers to send with every request (optional). @@ -99,7 +92,22 @@ The client session supports the context manager protocol for self closing. Iterable of :class:`str` or :class:`~multidict.istr` (optional) :param aiohttp.BasicAuth auth: an object that represents HTTP Basic - Authorization (optional) + Authorization (optional). It will be included + with any request. However, if the + ``_base_url`` parameter is set, the request + URL's origin must match the base URL's origin; + otherwise, the default auth will not be + included. + + :param collections.abc.Callable json_serialize: Json *serializer* callable. 
+ + By default :func:`json.dumps` function. + + :param aiohttp.ClientRequest request_class: Custom class to use for client requests. + + :param ClientResponse response_class: Custom class to use for client responses. + + :param ClientWebSocketResponse ws_response_class: Custom class to use for websocket responses. :param version: supported HTTP version, ``HTTP 1.1`` by default. @@ -116,16 +124,20 @@ The client session supports the context manager protocol for self closing. :class:`aiohttp.DummyCookieJar` instance can be provided. - :param collections.abc.Callable json_serialize: Json *serializer* callable. + :param bool connector_owner: - By default :func:`json.dumps` function. + Close connector instance on session closing. + + Setting the parameter to ``False`` allows to share + connection pool between sessions without sharing session state: + cookies etc. :param bool raise_for_status: Automatically call :meth:`ClientResponse.raise_for_status()` for each response, ``False`` by default. - This parameter can be overridden when you making a request, e.g.:: + This parameter can be overridden when making a request, e.g.:: client_session = aiohttp.ClientSession(raise_for_status=True) resp = await client_session.get(url, raise_for_status=False) @@ -158,39 +170,10 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.3 - :param float read_timeout: Request operations timeout. ``read_timeout`` is - cumulative for all request operations (request, redirects, responses, - data consuming). By default, the read timeout is 5*60 seconds. - Use ``None`` or ``0`` to disable timeout checks. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param float conn_timeout: timeout for connection establishing - (optional). Values ``0`` or ``None`` mean no timeout. - - .. deprecated:: 3.3 - - Use ``timeout`` parameter instead. - - :param bool connector_owner: - - Close connector instance on session closing. 
- - Setting the parameter to ``False`` allows to share - connection pool between sessions without sharing session state: - cookies etc. - :param bool auto_decompress: Automatically decompress response body (``True`` by default). .. versionadded:: 2.3 - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - 64 KiB by default. - - .. versionadded:: 3.7 - :param bool trust_env: Trust environment settings for proxy configuration if the parameter is ``True`` (``False`` by default). See :ref:`aiohttp-client-proxy-support` for more information. @@ -227,6 +210,15 @@ The client session supports the context manager protocol for self closing. disabling. See :ref:`aiohttp-client-tracing-reference` for more information. + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). + 64 KiB by default. + + .. versionadded:: 3.7 + + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver: A :term:`callable` that accepts a :class:`ClientResponse` and the :class:`bytes` contents, and returns a :class:`str` which will be used as @@ -371,12 +363,15 @@ The client session supports the context manager protocol for self closing. max_redirects=10,\ compress=None, chunked=None, expect100=False, raise_for_status=None,\ read_until_eof=True, \ - read_bufsize=None, \ proxy=None, proxy_auth=None,\ timeout=sentinel, ssl=True, \ - verify_ssl=None, fingerprint=None, \ - ssl_context=None, proxy_headers=None, \ - server_hostname=None, auto_decompress=None) + server_hostname=None, \ + proxy_headers=None, \ + trace_request_ctx=None, \ + read_bufsize=None, \ + auto_decompress=None, \ + max_line_size=None, \ + max_field_size=None) :async: :noindexentry: @@ -470,12 +465,6 @@ The client session supports the context manager protocol for self closing. does not have Content-Length header. 
``True`` by default (optional). - :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). - ``None`` by default, - it means that the session global value is used. - - .. versionadded:: 3.7 - :param proxy: Proxy URL, :class:`str` or :class:`~yarl.URL` (optional) :param aiohttp.BasicAuth proxy_auth: an object that represents proxy HTTP @@ -503,29 +492,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 - :param bool verify_ssl: Perform SSL certificate validation for - *HTTPS* requests (enabled by default). May be disabled to - skip validation for sites with invalid certificates. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=False`` - - :param bytes fingerprint: Pass the SHA256 digest of the expected - certificate in DER format to verify that the certificate the - server presents matches. Useful for `certificate pinning - `_. - - Warning: use of MD5 or SHA1 digests is insecure and removed. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=aiohttp.Fingerprint(digest)`` - :param str server_hostname: Sets or overrides the host name that the target server’s certificate will be matched against. @@ -533,18 +499,6 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.9 - :param ssl.SSLContext ssl_context: ssl context used for processing - *HTTPS* requests (optional). - - *ssl_context* may be used for configuring certification - authority channel, supported SSL options etc. - - .. versionadded:: 2.3 - - .. deprecated:: 3.0 - - Use ``ssl=ssl_context`` - :param collections.abc.Mapping proxy_headers: HTTP headers to send to the proxy if the parameter proxy has been provided. @@ -557,10 +511,20 @@ The client session supports the context manager protocol for self closing. .. versionadded:: 3.0 + :param int read_bufsize: Size of the read buffer (:attr:`ClientResponse.content`). 
+ ``None`` by default, + it means that the session global value is used. + + .. versionadded:: 3.7 + :param bool auto_decompress: Automatically decompress response body. Overrides :attr:`ClientSession.auto_decompress`. May be used to enable/disable auto decompression on a per-request basis. + :param int max_line_size: Maximum allowed size of lines in responses. + + :param int max_field_size: Maximum allowed size of header fields in responses. + :return ClientResponse: a :class:`client response ` object. @@ -687,8 +651,8 @@ The client session supports the context manager protocol for self closing. ` object. .. method:: ws_connect(url, *, method='GET', \ - protocols=(), timeout=10.0,\ - receive_timeout=None,\ + protocols=(), \ + timeout=sentinel,\ auth=None,\ autoclose=True,\ autoping=True,\ @@ -711,12 +675,11 @@ The client session supports the context manager protocol for self closing. :param tuple protocols: Websocket protocols - :param float timeout: Timeout for websocket to close. ``10`` seconds - by default - - :param float receive_timeout: Timeout for websocket to receive - complete message. ``None`` (unlimited) - seconds by default + :param timeout: a :class:`ClientWSTimeout` timeout for websocket. + By default, the value + `ClientWSTimeout(ws_receive=None, ws_close=10.0)` is used + (``10.0`` seconds for the websocket to close). + ``None`` means no timeout will be used. :param aiohttp.BasicAuth auth: an object that represents HTTP Basic Authorization (optional) @@ -990,7 +953,7 @@ is controlled by *force_close* constructor's parameter). flag. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1035,7 +998,7 @@ is controlled by *force_close* constructor's parameter). 
Endpoints are the same if they are have equal ``(host, port, is_ssl)`` triple. - If *limit_per_host* is ``None`` the connector has no limit per host. + If *limit_per_host* is ``0`` the connector has no limit per host. Read-only property. @@ -1130,7 +1093,7 @@ is controlled by *force_close* constructor's parameter). updated refreshing each entry after N seconds. :param int limit: total number simultaneous connections. If *limit* is - ``None`` the connector has no limit (default: 100). + ``0`` the connector has no limit (default: 100). :param int limit_per_host: limit simultaneous connections to the same endpoint. Endpoints are the same if they are @@ -1757,7 +1720,24 @@ Utilities :class:`float`, ``None`` by default. - .. versionadded:: 3.3 + +.. class:: ClientWSTimeout(*, ws_receive=None, ws_close=None) + + A data class for websocket client timeout settings. + + .. attribute:: ws_receive + + A timeout for websocket to receive a complete message. + + :class:`float`, ``None`` by default. + + .. attribute:: ws_close + + A timeout for the websocket to close. + + :class:`float`, ``10.0`` by default. + + .. versionadded:: 4.0 .. note:: @@ -1799,6 +1779,26 @@ Utilities .. versionadded:: 3.8 +.. class:: ContentDisposition + + A data class to represent the Content-Disposition header, + available as :attr:`ClientResponse.content_disposition` attribute. + + .. attribute:: type + + A :class:`str` instance. Value of Content-Disposition header + itself, e.g. ``attachment``. + + .. attribute:: filename + + A :class:`str` instance. Content filename extracted from + parameters. May be ``None``. + + .. attribute:: parameters + + Read-only mapping contains all parameters. + + .. class:: RequestInfo() A data class with request URL and headers from :class:`~aiohttp.ClientRequest` @@ -2149,25 +2149,6 @@ All exceptions are available as members of *aiohttp* module. Derived from :exc:`RedirectClientError` and :exc:`NonHttpUrlClientError` - -.. 
class:: ContentDisposition - - Represent Content-Disposition header - - .. attribute:: type - - A :class:`str` instance. Value of Content-Disposition header - itself, e.g. ``attachment``. - - .. attribute:: filename - - A :class:`str` instance. Content filename extracted from - parameters. May be ``None``. - - .. attribute:: parameters - - Read-only mapping contains all parameters. - Response errors ^^^^^^^^^^^^^^^ @@ -2306,17 +2287,20 @@ Connection errors Server operation timeout: read timeout, etc. + To catch all timeouts, including the ``total`` timeout, use + :exc:`asyncio.TimeoutError`. + Derived from :exc:`ServerConnectionError` and :exc:`asyncio.TimeoutError` .. class:: ConnectionTimeoutError - Connection timeout on request: e.g. read timeout. + Connection timeout on ``connect`` and ``sock_connect`` timeouts. Derived from :exc:`ServerTimeoutError` .. class:: SocketTimeoutError - Reading from socket timeout. + Reading from socket timeout on ``sock_read`` timeout. Derived from :exc:`ServerTimeoutError` diff --git a/docs/streams.rst b/docs/streams.rst index 10eec6d6a43..9d49a80f1b6 100644 --- a/docs/streams.rst +++ b/docs/streams.rst @@ -26,13 +26,17 @@ Reading Methods .. method:: StreamReader.read(n=-1) :async: - Read up to *n* bytes. If *n* is not provided, or set to ``-1``, read until - EOF and return all read bytes. + Read up to a maximum of *n* bytes. If *n* is not provided, or set to ``-1``, + read until EOF and return all read bytes. + + When *n* is provided, data will be returned as soon as it is available. + Therefore it will return less than *n* bytes if there are less than *n* + bytes in the buffer. If the EOF was received and the internal buffer is empty, return an empty bytes object. - :param int n: how many bytes to read, ``-1`` for the whole stream. + :param int n: maximum number of bytes to read, ``-1`` for the whole stream. :return bytes: the given data @@ -127,6 +131,14 @@ size limit and over any available data. 
async for data in response.content.iter_chunked(1024): print(data) + To get chunks that are exactly *n* bytes, you could use the + `asyncstdlib.itertools `_ + module:: + + chunks = batched(chain.from_iterable(response.content.iter_chunked(n)), n) + async for data in chunks: + print(data) + .. method:: StreamReader.iter_any() :async: diff --git a/docs/testing.rst b/docs/testing.rst index 828b5072b4d..a7b93e714f6 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -449,14 +449,12 @@ Framework Agnostic Utilities High level test creation:: - from aiohttp.test_utils import TestClient, TestServer, loop_context + from aiohttp.test_utils import TestClient, TestServer from aiohttp import request - # loop_context is provided as a utility. You can use any - # asyncio.BaseEventLoop class in its place. - with loop_context() as loop: + async def test(): app = _create_example_app() - with TestClient(TestServer(app), loop=loop) as client: + async with TestClient(TestServer(app)) as client: async def test_get_route(): nonlocal client @@ -465,7 +463,7 @@ High level test creation:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) + await test_get_route() If it's preferred to handle the creation / teardown on a more granular @@ -473,10 +471,10 @@ basis, the TestClient object can be used directly:: from aiohttp.test_utils import TestClient, TestServer - with loop_context() as loop: + async def test(): app = _create_example_app() - client = TestClient(TestServer(app), loop=loop) - loop.run_until_complete(client.start_server()) + client = TestClient(TestServer(app)) + await client.start_server() root = "http://127.0.0.1:{}".format(port) async def test_get_route(): @@ -485,8 +483,8 @@ basis, the TestClient object can be used directly:: text = await resp.text() assert "Hello, world" in text - loop.run_until_complete(test_get_route()) - loop.run_until_complete(client.close()) + await test_get_route() + await client.close() A full list of 
the utilities provided can be found at the diff --git a/docs/web_advanced.rst b/docs/web_advanced.rst index dc94bea33bf..070bae34f10 100644 --- a/docs/web_advanced.rst +++ b/docs/web_advanced.rst @@ -1064,13 +1064,10 @@ below:: async with client.pubsub() as pubsub: await pubsub.subscribe(channel) while True: - try: - msg = await pubsub.get_message(ignore_subscribe_messages=True) - if msg is not None: - for ws in app["websockets"]: - await ws.send_str("{}: {}".format(channel, msg)) - except asyncio.CancelledError: - break + msg = await pubsub.get_message(ignore_subscribe_messages=True) + if msg is not None: + for ws in app["websockets"]: + await ws.send_str("{}: {}".format(channel, msg)) async def background_tasks(app): @@ -1079,7 +1076,8 @@ below:: yield app[redis_listener].cancel() - await app[redis_listener] + with contextlib.suppress(asyncio.CancelledError): + await app[redis_listener] app = web.Application() diff --git a/requirements/base.txt b/requirements/base.txt index d90626583b0..5876b5881a7 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in @@ -30,13 +30,13 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -packaging==23.1 +packaging==24.1 # via gunicorn -pycares==4.3.0 +pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.9.4 +yarl==1.9.7 # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 93e94b158b0..428cfff3d7f 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform 
== "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in @@ -16,9 +16,9 @@ aioredis==2.0.1 # via -r requirements/lint.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -alabaster==0.7.12 +alabaster==0.7.13 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via @@ -26,29 +26,27 @@ async-timeout==4.0.3 ; python_version < "3.11" # aioredis attrs==24.2.0 # via -r requirements/runtime-deps.in -babel==2.9.1 +babel==2.16.0 # via sphinx -backports-entry-points-selectable==1.1.1 - # via virtualenv -blockdiag==2.0.1 +blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.0.3 +build==1.2.1 # via pip-tools -certifi==2023.7.22 +certifi==2024.8.30 # via requests cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.0.3 +click==8.1.7 # via # cherry-picker # pip-tools @@ -60,19 +58,19 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==41.0.2 +cryptography==43.0.0 # via # pyjwt # trustme cython==3.0.11 # via -r requirements/cython.in -distlib==0.3.3 +distlib==0.3.8 # via virtualenv docutils==0.20.1 # via sphinx -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest -filelock==3.3.2 +filelock==3.15.4 # via virtualenv freezegun==1.5.1 # via @@ -84,49 +82,53 @@ frozenlist==1.4.1 # aiosignal funcparserlib==1.0.1 # via blockdiag -gidgethub==5.0.1 +gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.3.5 +identify==2.6.0 # via pre-commit idna==3.3 # via # requests # trustme # yarl -imagesize==1.3.0 +imagesize==1.4.1 # 
via sphinx -importlib-metadata==7.0.0 +importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier -iniconfig==1.1.1 +iniconfig==2.0.0 # via pytest -jinja2==3.0.3 +jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.0.1 +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 # via jinja2 +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.6.0 +nodeenv==1.9.1 # via pre-commit -packaging==21.2 +packaging==24.1 # via # build # gunicorn @@ -138,7 +140,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==2.4.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 # via pytest @@ -146,25 +148,25 @@ pre-commit==3.5.0 # via -r requirements/lint.in proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.15.1 - # via sphinx -pyjwt==2.3.0 +pygments==2.18.0 + # via + # rich + # sphinx +pyjwt==2.9.0 # via # gidgethub # pyjwt -pyparsing==2.4.7 - # via packaging -pyproject-hooks==1.0.0 +pyproject-hooks==1.1.0 # via # build # pip-tools @@ -180,34 +182,36 @@ pytest-mock==3.14.0 # via # -r requirements/lint.in # -r requirements/test.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in -pytz==2023.3.post1 +pytz==2024.1 # via babel -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # 
via -r requirements/test.in -regex==2021.11.10 +regex==2024.7.24 # via re-assert -requests==2.31.0 +requests==2.32.3 # via # cherry-picker # python-on-whales # sphinx +rich==13.8.0 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 - # via - # python-dateutil - # virtualenv + # via python-dateutil slotscheck==0.19.0 # via -r requirements/lint.in -snowballstemmer==2.1.0 +snowballstemmer==2.2.0 # via sphinx sphinx==7.1.2 # via @@ -215,13 +219,13 @@ sphinx==7.1.2 # sphinxcontrib-blockdiag # sphinxcontrib-spelling # sphinxcontrib-towncrier -sphinxcontrib-applehelp==1.0.2 +sphinxcontrib-applehelp==1.0.4 # via sphinx sphinxcontrib-blockdiag==3.0.0 # via -r requirements/doc.in sphinxcontrib-devhelp==1.0.2 # via sphinx -sphinxcontrib-htmlhelp==2.0.0 +sphinxcontrib-htmlhelp==2.0.1 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx @@ -238,9 +242,9 @@ tomli==2.0.1 # build # cherry-picker # coverage + # incremental # mypy # pip-tools - # pyproject-hooks # pytest # slotscheck # towncrier @@ -248,15 +252,15 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.62.3 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.6.1 +typer==0.12.5 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types @@ -264,33 +268,36 @@ typing-extensions==4.11.0 # pydantic # pydantic-core # python-on-whales + # rich + # typer uritemplate==4.1.1 # via gidgethub -urllib3==1.26.7 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.10.0 +virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==1.11.1 +webcolors==24.8.0 # via blockdiag -wheel==0.37.0 +wheel==0.44.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.7 # via -r 
requirements/runtime-deps.in -zipp==3.17.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==23.2.1 +pip==24.2 # via pip-tools -setuptools==68.0.0 +setuptools==74.1.0 # via # blockdiag + # incremental # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 15cf7d21685..55695f7eb64 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -8,7 +8,7 @@ aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via # -r requirements/lint.in # -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiohttp-theme==0.1.6 # via -r requirements/doc.in @@ -18,7 +18,7 @@ aiosignal==1.3.1 # via -r requirements/runtime-deps.in alabaster==0.7.13 # via sphinx -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via @@ -26,27 +26,27 @@ async-timeout==4.0.3 ; python_version < "3.11" # aioredis attrs==24.2.0 # via -r requirements/runtime-deps.in -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -build==1.0.3 +build==1.2.1 # via pip-tools -certifi==2023.7.22 +certifi==2024.8.30 # via requests cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests cherry-picker==2.2.0 # via -r requirements/dev.in -click==8.1.6 +click==8.1.7 # via # cherry-picker # pip-tools @@ -58,17 +58,17 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==41.0.3 +cryptography==43.0.0 # via # pyjwt # trustme -distlib==0.3.7 +distlib==0.3.8 # via virtualenv docutils==0.20.1 # via sphinx -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest -filelock==3.12.2 +filelock==3.15.4 # via virtualenv 
freezegun==1.5.1 # via @@ -84,7 +84,7 @@ gidgethub==5.3.0 # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in -identify==2.5.26 +identify==2.6.0 # via pre-commit idna==3.4 # via @@ -93,35 +93,39 @@ idna==3.4 # yarl imagesize==1.4.1 # via sphinx -importlib-metadata==7.0.0 +importlib-metadata==8.4.0 # via # build # sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier iniconfig==2.0.0 # via pytest -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.5 # via jinja2 +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via # -r requirements/lint.in # -r requirements/test.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit -packaging==23.1 +packaging==24.1 # via # build # gunicorn @@ -133,7 +137,7 @@ pillow==9.5.0 # blockdiag pip-tools==7.4.1 # via -r requirements/dev.in -platformdirs==3.10.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 # via pytest @@ -141,21 +145,23 @@ pre-commit==3.5.0 # via -r requirements/lint.in proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic -pygments==2.15.1 - # via sphinx +pygments==2.18.0 + # via + # rich + # sphinx pyjwt==2.8.0 # via # gidgethub # pyjwt -pyproject-hooks==1.0.0 +pyproject-hooks==1.1.0 # via # build # pip-tools @@ -171,27 +177,31 @@ pytest-mock==3.14.0 # via # -r requirements/lint.in # -r requirements/test.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via # -r requirements/lint.in # -r requirements/test.in 
-pytz==2023.3.post1 +pytz==2024.1 # via babel -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.7.24 # via re-assert -requests==2.31.0 +requests==2.32.3 # via # cherry-picker # python-on-whales # sphinx +rich==13.8.0 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil slotscheck==0.19.0 @@ -224,9 +234,9 @@ tomli==2.0.1 # build # cherry-picker # coverage + # incremental # mypy # pip-tools - # pyproject-hooks # pytest # slotscheck # towncrier @@ -234,15 +244,15 @@ towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -tqdm==4.65.0 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via # -r requirements/lint.in # -r requirements/test.in -typer==0.9.0 +typer==0.12.5 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types @@ -250,35 +260,36 @@ typing-extensions==4.11.0 # pydantic # pydantic-core # python-on-whales + # rich # typer uritemplate==4.1.1 # via gidgethub -urllib3==2.0.4 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via # -r requirements/base.in # -r requirements/lint.in -virtualenv==20.24.2 +virtualenv==20.26.3 # via pre-commit wait-for-it==2.2.2 # via -r requirements/test.in -webcolors==1.13 +webcolors==24.8.0 # via blockdiag -wheel==0.41.0 +wheel==0.44.0 # via pip-tools -yarl==1.9.4 +yarl==1.9.7 # via -r requirements/runtime-deps.in -zipp==3.17.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources # The following packages are considered to be unsafe in a requirements file: -pip==23.2.1 +pip==24.2 # via pip-tools -setuptools==68.0.0 +setuptools==74.1.0 # via # blockdiag - # nodeenv + # incremental # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 9ee15189662..4ef41521136 100644 
--- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -8,15 +8,15 @@ aiohttp-theme==0.1.6 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2023.7.22 +certifi==2024.8.30 # via requests -charset-normalizer==3.3.1 +charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx @@ -26,19 +26,19 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==8.4.0 # via sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -packaging==23.1 +packaging==24.1 # via sphinx pillow==9.5.0 # via @@ -46,19 +46,18 @@ pillow==9.5.0 # blockdiag pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.15.1 +pygments==2.18.0 # via sphinx -pytz==2023.3.post1 +pytz==2024.1 # via babel -requests==2.31.0 +requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.1.2 # via # -r requirements/doc.in # sphinxcontrib-blockdiag - # sphinxcontrib-serializinghtml # sphinxcontrib-spelling # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 @@ -73,27 +72,31 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.3 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-spelling==8.0.0 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in tomli==2.0.1 - # via towncrier + # via + # incremental + # towncrier towncrier==23.11.0 # via # -r requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.0.4 +urllib3==2.2.2 # via requests -webcolors==1.13 +webcolors==24.8.0 # via blockdiag -zipp==3.17.0 
+zipp==3.20.1 # via # importlib-metadata # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 - # via blockdiag +setuptools==74.1.0 + # via + # blockdiag + # incremental diff --git a/requirements/doc.txt b/requirements/doc.txt index d9e7fb0ad7f..804cb6e129d 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -8,15 +8,15 @@ aiohttp-theme==0.1.6 # via -r requirements/doc.in alabaster==0.7.13 # via sphinx -babel==2.12.1 +babel==2.16.0 # via sphinx blockdiag==3.0.0 # via sphinxcontrib-blockdiag -certifi==2023.7.22 +certifi==2024.8.30 # via requests -charset-normalizer==3.3.1 +charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via towncrier docutils==0.20.1 # via sphinx @@ -26,37 +26,36 @@ idna==3.4 # via requests imagesize==1.4.1 # via sphinx -importlib-metadata==6.8.0 +importlib-metadata==8.4.0 # via sphinx -importlib-resources==6.1.1 +importlib-resources==6.4.4 # via towncrier -incremental==22.10.0 +incremental==24.7.2 # via towncrier -jinja2==3.1.2 +jinja2==3.1.4 # via # sphinx # towncrier -markupsafe==2.1.3 +markupsafe==2.1.5 # via jinja2 -packaging==23.1 +packaging==24.1 # via sphinx pillow==9.5.0 # via # -c requirements/broken-projects.in # blockdiag -pygments==2.15.1 +pygments==2.18.0 # via sphinx -pytz==2023.3.post1 +pytz==2024.1 # via babel -requests==2.31.0 +requests==2.32.3 # via sphinx snowballstemmer==2.2.0 # via sphinx -sphinx==7.2.6 +sphinx==7.1.2 # via # -r requirements/doc.in # sphinxcontrib-blockdiag - # sphinxcontrib-serializinghtml # sphinxcontrib-towncrier sphinxcontrib-applehelp==1.0.4 # via sphinx @@ -70,25 +69,29 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.3 # via sphinx -sphinxcontrib-serializinghtml==1.1.9 +sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-towncrier==0.4.0a0 # via -r requirements/doc.in tomli==2.0.1 - # via towncrier + # via + # incremental + # towncrier towncrier==23.11.0 # via # -r 
requirements/doc.in # sphinxcontrib-towncrier -urllib3==2.0.4 +urllib3==2.2.2 # via requests -webcolors==1.13 +webcolors==24.8.0 # via blockdiag -zipp==3.17.0 +zipp==3.20.1 # via # importlib-metadata # importlib-resources # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 - # via blockdiag +setuptools==74.1.0 + # via + # blockdiag + # incremental diff --git a/requirements/lint.txt b/requirements/lint.txt index d5a6435c0b4..82efbf9a4ee 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -8,35 +8,35 @@ aiodns==3.2.0 # via -r requirements/lint.in aioredis==2.0.1 # via -r requirements/lint.in -annotated-types==0.6.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 # via aioredis -certifi==2024.2.2 +certifi==2024.8.30 # via requests cffi==1.17.0 # via # cryptography # pycares -cfgv==3.3.1 +cfgv==3.4.0 # via pre-commit charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # slotscheck # typer cryptography==43.0.0 # via trustme -distlib==0.3.7 +distlib==0.3.8 # via virtualenv -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest -filelock==3.12.2 +filelock==3.15.4 # via virtualenv freezegun==1.5.1 # via -r requirements/lint.in -identify==2.5.26 +identify==2.6.0 # via pre-commit idna==3.7 # via @@ -48,15 +48,15 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/lint.in mypy-extensions==1.0.0 # via mypy -nodeenv==1.8.0 +nodeenv==1.9.1 # via pre-commit -packaging==23.1 +packaging==24.1 # via pytest -platformdirs==3.10.0 +platformdirs==4.2.2 # via virtualenv pluggy==1.5.0 # via pytest @@ -66,11 +66,11 @@ pycares==4.4.0 # via aiodns pycparser==2.22 # via cffi -pydantic==2.7.1 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.18.2 +pydantic-core==2.20.1 # via pydantic -pygments==2.17.2 +pygments==2.18.0 # via rich pytest==8.3.2 # via @@ 
-82,11 +82,11 @@ python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via -r requirements/lint.in -pyyaml==6.0.1 +pyyaml==6.0.2 # via pre-commit -requests==2.31.0 +requests==2.32.3 # via python-on-whales -rich==13.7.1 +rich==13.8.0 # via typer shellingham==1.5.4 # via typer @@ -99,13 +99,13 @@ tomli==2.0.1 # mypy # pytest # slotscheck -tqdm==4.66.2 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 # via -r requirements/lint.in -typer==0.12.3 +typer==0.12.5 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # aioredis # annotated-types @@ -115,13 +115,9 @@ typing-extensions==4.11.0 # python-on-whales # rich # typer -urllib3==2.2.1 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" # via -r requirements/lint.in -virtualenv==20.24.2 +virtualenv==20.26.3 # via pre-commit - -# The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 - # via nodeenv diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 977f25cb3be..00927852825 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -6,7 +6,7 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in @@ -28,9 +28,9 @@ multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -pycares==4.3.0 +pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi -yarl==1.9.4 +yarl==1.9.7 # via -r requirements/runtime-deps.in diff --git a/requirements/test.txt b/requirements/test.txt index d663e411bdb..a2bfc72a0a9 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -6,11 +6,11 @@ # aiodns==3.2.0 ; sys_platform == "linux" or sys_platform == "darwin" # via -r requirements/runtime-deps.in -aiohappyeyeballs==2.3.7 +aiohappyeyeballs==2.4.0 # via -r 
requirements/runtime-deps.in aiosignal==1.3.1 # via -r requirements/runtime-deps.in -annotated-types==0.5.0 +annotated-types==0.7.0 # via pydantic async-timeout==4.0.3 ; python_version < "3.11" # via -r requirements/runtime-deps.in @@ -18,15 +18,15 @@ attrs==24.2.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in -certifi==2023.7.22 +certifi==2024.8.30 # via requests cffi==1.17.0 # via # cryptography # pycares -charset-normalizer==3.2.0 +charset-normalizer==3.3.2 # via requests -click==8.1.6 +click==8.1.7 # via # typer # wait-for-it @@ -34,9 +34,9 @@ coverage==7.6.1 # via # -r requirements/test.in # pytest-cov -cryptography==41.0.2 +cryptography==43.0.0 # via trustme -exceptiongroup==1.1.2 +exceptiongroup==1.2.2 # via pytest freezegun==1.5.1 # via -r requirements/test.in @@ -53,15 +53,19 @@ idna==3.4 # yarl iniconfig==2.0.0 # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py multidict==6.0.5 # via # -r requirements/runtime-deps.in # yarl -mypy==1.11.1 ; implementation_name == "cpython" +mypy==1.11.2 ; implementation_name == "cpython" # via -r requirements/test.in mypy-extensions==1.0.0 # via mypy -packaging==23.1 +packaging==24.1 # via # gunicorn # pytest @@ -69,14 +73,16 @@ pluggy==1.5.0 # via pytest proxy-py==2.4.7 # via -r requirements/test.in -pycares==4.3.0 +pycares==4.4.0 # via aiodns -pycparser==2.21 +pycparser==2.22 # via cffi -pydantic==2.2.0 +pydantic==2.8.2 # via python-on-whales -pydantic-core==2.6.0 +pydantic-core==2.20.1 # via pydantic +pygments==2.18.0 + # via rich pytest==8.3.2 # via # -r requirements/test.in @@ -86,18 +92,22 @@ pytest-cov==5.0.0 # via -r requirements/test.in pytest-mock==3.14.0 # via -r requirements/test.in -python-dateutil==2.8.2 +python-dateutil==2.9.0.post0 # via freezegun python-on-whales==0.72.0 # via -r requirements/test.in re-assert==1.1.0 # via -r requirements/test.in -regex==2023.6.3 +regex==2024.7.24 # 
via re-assert -requests==2.31.0 +requests==2.32.3 # via python-on-whales +rich==13.8.0 + # via typer setuptools-git==1.2 # via -r requirements/test.in +shellingham==1.5.4 + # via typer six==1.16.0 # via python-dateutil tomli==2.0.1 @@ -105,25 +115,26 @@ tomli==2.0.1 # coverage # mypy # pytest -tqdm==4.65.0 +tqdm==4.66.5 # via python-on-whales trustme==1.1.0 ; platform_machine != "i686" # via -r requirements/test.in -typer==0.9.0 +typer==0.12.5 # via python-on-whales -typing-extensions==4.11.0 +typing-extensions==4.12.2 # via # annotated-types # mypy # pydantic # pydantic-core # python-on-whales + # rich # typer -urllib3==2.0.4 +urllib3==2.2.2 # via requests uvloop==0.20.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in wait-for-it==2.2.2 # via -r requirements/test.in -yarl==1.9.4 +yarl==1.9.7 # via -r requirements/runtime-deps.in diff --git a/setup.cfg b/setup.cfg index cfd1be5610f..cd1602880e6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,7 +33,6 @@ classifiers = Programming Language :: Python Programming Language :: Python :: 3 - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 @@ -42,7 +41,7 @@ classifiers = Topic :: Internet :: WWW/HTTP [options] -python_requires = >=3.8 +python_requires = >=3.9 packages = aiohttp # https://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag zip_safe = False @@ -87,9 +86,14 @@ max-line-length=79 zip_ok = false [flake8] -extend-select = B950 +extend-select = + B950, + # NIC001 -- "Implicitly concatenated str literals on one line" + NIC001, + # NIC101 -- "Implicitly concatenated bytes literals on one line" + NIC101, # TODO: don't disable D*, fix up issues instead -ignore = N801,N802,N803,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 +ignore = N801,N802,N803,NIC002,NIC102,E203,E226,E305,W504,E252,E301,E302,E501,E704,W503,W504,D1,D4 
max-line-length = 88 per-file-ignores = # I900: Shouldn't appear in requirements for examples. @@ -138,8 +142,6 @@ addopts = filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning - ignore:unclosed transport :ResourceWarning ignore:Unclosed client session None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(foo="bar"), ) err.foo = "bar" for proto in range(pickle.HIGHEST_PROTOCOL + 1): @@ -54,7 +55,8 @@ def test_pickle(self) -> None: assert err2.history == () assert err2.status == 400 assert err2.message == "Something wrong" - assert err2.headers == {} + # Use headers.get() to verify static type is correct. + assert err2.headers.get("foo") == "bar" assert err2.foo == "bar" def test_repr(self) -> None: @@ -66,11 +68,11 @@ def test_repr(self) -> None: history=(), status=400, message="Something wrong", - headers={}, + headers=CIMultiDict(), ) assert repr(err) == ( "ClientResponseError(%r, (), status=400, " - "message='Something wrong', headers={})" % (self.request_info,) + "message='Something wrong', headers=)" % (self.request_info,) ) def test_str(self) -> None: @@ -79,11 +81,9 @@ def test_str(self) -> None: history=(), status=400, message="Something wrong", - headers={}, - ) - assert str(err) == ( - "400, message='Something wrong', " "url='http://example.com'" + headers=CIMultiDict(), ) + assert str(err) == ("400, message='Something wrong', url='http://example.com'") def test_response_status() -> None: @@ -252,7 +252,7 @@ def test_pickle(self) -> None: def test_repr(self) -> None: err = client.ServerDisconnectedError() - assert repr(err) == ("ServerDisconnectedError" "('Server disconnected')") + assert repr(err) == ("ServerDisconnectedError('Server disconnected')") err = client.ServerDisconnectedError(message="No connection") assert repr(err) == 
"ServerDisconnectedError('No connection')" diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 872876d4a32..74c4d99765e 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -9,7 +9,9 @@ import socket import ssl import sys +import tarfile import time +import zipfile from typing import Any, AsyncIterator, Type from unittest import mock @@ -29,7 +31,7 @@ SocketTimeoutError, TooManyRedirects, ) -from aiohttp.pytest_plugin import AiohttpClient, TestClient +from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer, TestClient from aiohttp.test_utils import unused_port @@ -339,10 +341,11 @@ async def data_gen(): async with client.get("/") as resp: assert 200 == resp.status - # Connection should have been reused + # First connection should have been closed, otherwise server won't know if it + # received the full message. conns = next(iter(client.session.connector._conns.values())) assert len(conns) == 1 - assert conns[0][0] is conn + assert conns[0][0] is not conn async def test_stream_request_on_server_eof_nested(aiohttp_client) -> None: @@ -360,14 +363,21 @@ async def data_gen(): yield b"just data" await asyncio.sleep(0.1) + assert client.session.connector is not None async with client.put("/", data=data_gen()) as resp: + first_conn = next(iter(client.session.connector._acquired)) assert 200 == resp.status - async with client.get("/") as resp: - assert 200 == resp.status + + async with client.get("/") as resp2: + assert 200 == resp2.status # Should be 2 separate connections conns = next(iter(client.session.connector._conns.values())) - assert len(conns) == 2 + assert len(conns) == 1 + + assert first_conn is not None + assert not first_conn.is_connected() + assert first_conn is not conns[0][0] async def test_HTTP_304_WITH_BODY(aiohttp_client) -> None: @@ -511,6 +521,61 @@ async def handler(request): assert 200 == resp.status +async def test_post_data_zipfile_filelike(aiohttp_client: AiohttpClient) -> 
None: + data = b"This is a zip file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted zipfile member failed to match original data." + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with zipfile.ZipFile(file=buf, mode="w") as zf: + with zf.open("payload1.txt", mode="w") as zip_filelike_writing: + zip_filelike_writing.write(data) + + buf.seek(0) + zf = zipfile.ZipFile(file=buf, mode="r") + resp = await client.post("/", data=zf.open("payload1.txt")) + assert 200 == resp.status + + +async def test_post_data_tarfile_filelike(aiohttp_client: AiohttpClient) -> None: + data = b"This is a tar file payload text file." + + async def handler(request: web.Request) -> web.Response: + val = await request.read() + assert data == val, "Transmitted tarfile member failed to match original data." + return web.Response() + + app = web.Application() + app.router.add_route("POST", "/", handler) + client = await aiohttp_client(app) + + buf = io.BytesIO() + with tarfile.open(fileobj=buf, mode="w") as tf: + ti = tarfile.TarInfo(name="payload1.txt") + ti.size = len(data) + tf.addfile(tarinfo=ti, fileobj=io.BytesIO(data)) + + # Random-access tarfile. + buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r:") + resp = await client.post("/", data=tf.extractfile("payload1.txt")) + assert 200 == resp.status + + # Streaming tarfile. 
+ buf.seek(0) + tf = tarfile.open(fileobj=buf, mode="r|") + for entry in tf: + resp = await client.post("/", data=tf.extractfile(entry)) + assert 200 == resp.status + + async def test_ssl_client( aiohttp_server, ssl_ctx, @@ -727,7 +792,6 @@ async def handler(request): raw_headers = tuple((bytes(h), bytes(v)) for h, v in resp.raw_headers) assert raw_headers == ( (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -760,7 +824,6 @@ async def handler(request): assert raw_headers == ( (b"X-Empty", b""), (b"Content-Length", b"0"), - (b"Content-Type", b"application/octet-stream"), (b"Date", mock.ANY), (b"Server", mock.ANY), ) @@ -2298,7 +2361,7 @@ async def handler(request): ret.set_cookie("c2", "cookie2") ret.headers.add( "Set-Cookie", - "c3=cookie3; " "HttpOnly; Path=/" " Expires=Tue, 1 Jan 1980 12:00:00 GMT; ", + "c3=cookie3; HttpOnly; Path=/ Expires=Tue, 1 Jan 1980 12:00:00 GMT; ", ) return ret @@ -2317,7 +2380,7 @@ async def handler(request): ret = web.Response() ret.set_cookie("c1", "cookie1") ret.set_cookie("c2", "cookie2") - ret.headers.add("Set-Cookie", "c3=cookie3; " "HttpOnly; Path=/" " Max-Age=1; ") + ret.headers.add("Set-Cookie", "c3=cookie3; HttpOnly; Path=/ Max-Age=1; ") return ret app = web.Application() @@ -2338,7 +2401,7 @@ async def handler(request): ret = web.Response() ret.headers.add( "Set-Cookie", - "overflow=overflow; " "HttpOnly; Path=/" " Max-Age=" + str(overflow) + "; ", + "overflow=overflow; HttpOnly; Path=/ Max-Age=" + str(overflow) + "; ", ) return ret @@ -2850,6 +2913,138 @@ async def close(self): assert resp.status == 200 +async def test_auth_persist_on_redirect_to_other_host_with_global_auth( + create_server_for_url_and_handler, +) -> None: + url_from = URL("http://host1.com/path1") + url_to = URL("http://host2.com/path2") + + async def srv_from(request: web.Request): + assert request.host == url_from.host + assert request.headers["Authorization"] == "Basic 
dXNlcjpwYXNz" + raise web.HTTPFound(url_to) + + async def srv_to(request: web.Request) -> web.Response: + assert request.host == url_to.host + assert "Authorization" in request.headers, "Header was dropped" + return web.Response() + + server_from = await create_server_for_url_and_handler(url_from, srv_from) + server_to = await create_server_for_url_and_handler(url_to, srv_to) + + assert ( + url_from.host != url_to.host or server_from.scheme != server_to.scheme + ), "Invalid test case, host or scheme must differ" + + protocol_port_map = { + "http": 80, + "https": 443, + } + etc_hosts = { + (url_from.host, protocol_port_map[server_from.scheme]): server_from, + (url_to.host, protocol_port_map[server_to.scheme]): server_to, + } + + class FakeResolver(AbstractResolver): + async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ): + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession( + connector=connector, auth=aiohttp.BasicAuth("user", "pass") + ) as client: + resp = await client.get(url_from) + assert resp.status == 200 + + +async def test_drop_auth_on_redirect_to_other_host_with_global_auth_and_base_url( + create_server_for_url_and_handler, +) -> None: + url_from = URL("http://host1.com/path1") + url_to = URL("http://host2.com/path2") + + async def srv_from(request: web.Request): + assert request.host == url_from.host + assert request.headers["Authorization"] == "Basic dXNlcjpwYXNz" + raise web.HTTPFound(url_to) + + async def srv_to(request: web.Request) -> web.Response: + assert request.host == url_to.host + assert "Authorization" not in request.headers, "Header was not dropped" + 
return web.Response() + + server_from = await create_server_for_url_and_handler(url_from, srv_from) + server_to = await create_server_for_url_and_handler(url_to, srv_to) + + assert ( + url_from.host != url_to.host or server_from.scheme != server_to.scheme + ), "Invalid test case, host or scheme must differ" + + protocol_port_map = { + "http": 80, + "https": 443, + } + etc_hosts = { + (url_from.host, protocol_port_map[server_from.scheme]): server_from, + (url_to.host, protocol_port_map[server_to.scheme]): server_to, + } + + class FakeResolver(AbstractResolver): + async def resolve( + self, + host: str, + port: int = 0, + family: socket.AddressFamily = socket.AF_INET, + ): + server = etc_hosts[(host, port)] + assert server.port is not None + + return [ + { + "hostname": host, + "host": server.host, + "port": server.port, + "family": socket.AF_INET, + "proto": 0, + "flags": socket.AI_NUMERICHOST, + } + ] + + async def close(self) -> None: + """Dummy""" + + connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False) + + async with aiohttp.ClientSession( + connector=connector, + base_url="http://host1.com", + auth=aiohttp.BasicAuth("user", "pass"), + ) as client: + resp = await client.get("/path1") + assert resp.status == 200 + + async def test_async_with_session() -> None: async with aiohttp.ClientSession() as session: pass @@ -3184,9 +3379,7 @@ def connection_made(self, transport): def data_received(self, data): self.data += data if data.endswith(b"\r\n\r\n"): - self.transp.write( - b"HTTP/1.1 200 OK\r\n" b"CONTENT-LENGTH: 2\r\n" b"\r\n" b"ok" - ) + self.transp.write(b"HTTP/1.1 200 OK\r\nCONTENT-LENGTH: 2\r\n\r\nok") self.transp.close() def connection_lost(self, exc): @@ -3596,9 +3789,10 @@ async def handler(request): assert resp.reason == "x" * 8191 -@pytest.mark.xfail(raises=asyncio.TimeoutError, reason="#7599") -async def test_rejected_upload(aiohttp_client, tmp_path) -> None: - async def ok_handler(request): +async def test_rejected_upload( + 
aiohttp_client: AiohttpClient, tmp_path: pathlib.Path +) -> None: + async def ok_handler(request: web.Request) -> web.Response: return web.Response() async def not_ok_handler(request): @@ -3615,13 +3809,11 @@ async def not_ok_handler(request): with open(file_path, "rb") as file: data = {"file": file} - async with await client.post("/not_ok", data=data) as resp_not_ok: - assert 400 == resp_not_ok.status + async with client.post("/not_ok", data=data) as resp_not_ok: + assert resp_not_ok.status == 400 - async with await client.get( - "/ok", timeout=aiohttp.ClientTimeout(total=0.01) - ) as resp_ok: - assert 200 == resp_ok.status + async with client.get("/ok", timeout=aiohttp.ClientTimeout(total=1)) as resp_ok: + assert resp_ok.status == 200 @pytest.mark.parametrize( @@ -3647,3 +3839,20 @@ async def handler(_: web.Request) -> web.Response: session = await aiohttp_client(app, raise_for_status=None) # type: ignore[arg-type] await session.get("/") + + +async def test_exception_when_read_outside_of_session( + aiohttp_server: AiohttpServer, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(body=b"1" * 1000000) + + app = web.Application() + app.router.add_get("/", handler) + + server = await aiohttp_server(app) + async with aiohttp.ClientSession() as sess: + resp = await sess.get(server.make_url("/")) + + with pytest.raises(RuntimeError, match="Connection closed"): + await resp.read() diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py index d8ffac0059c..ba45d6a6839 100644 --- a/tests/test_client_proto.py +++ b/tests/test_client_proto.py @@ -50,7 +50,7 @@ async def test_uncompleted_message(loop) -> None: proto.set_response_params(read_until_eof=True) proto.data_received( - b"HTTP/1.1 301 Moved Permanently\r\n" b"Location: http://python.org/" + b"HTTP/1.1 301 Moved Permanently\r\nLocation: http://python.org/" ) proto.connection_lost(None) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 
7d9f69b52f0..2d70ebdd4f2 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -453,6 +453,13 @@ def test_basic_auth_from_url(make_request) -> None: assert "python.org" == req.host +def test_basic_auth_no_user_from_url(make_request) -> None: + req = make_request("get", "http://:1234@python.org") + assert "AUTHORIZATION" in req.headers + assert "Basic OjEyMzQ=" == req.headers["AUTHORIZATION"] + assert "python.org" == req.host + + def test_basic_auth_from_url_overridden(make_request) -> None: req = make_request( "get", "http://garbage@python.org", auth=aiohttp.BasicAuth("nkim", "1234") diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index a790fba43ec..31ec7576c97 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -91,7 +91,7 @@ async def test_ws_connect_read_timeout_stays_inf( res = await aiohttp.ClientSession().ws_connect( "http://test.org", protocols=("t1", "t2", "chat"), - receive_timeout=0.5, + timeout=aiohttp.ClientWSTimeout(0.5), ) assert isinstance(res, client.ClientWebSocketResponse) @@ -122,7 +122,7 @@ async def test_ws_connect_read_timeout_reset_to_max( res = await aiohttp.ClientSession().ws_connect( "http://test.org", protocols=("t1", "t2", "chat"), - receive_timeout=1.0, + timeout=aiohttp.ClientWSTimeout(1.0), ) assert isinstance(res, client.ClientWebSocketResponse) @@ -600,8 +600,9 @@ async def test_reader_read_exception(ws_key, key_data, loop) -> None: async def test_receive_runtime_err(loop) -> None: + timeout = aiohttp.ClientWSTimeout(ws_receive=10.0) resp = client.ClientWebSocketResponse( - mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), 10.0, True, True, loop + mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), timeout, True, True, loop ) resp._waiting = True diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 274092a189a..30da0dca802 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -7,6 +7,7 @@ import 
aiohttp from aiohttp import ServerTimeoutError, WSMsgType, hdrs, web +from aiohttp.client_ws import ClientWSTimeout from aiohttp.http import WSCloseCode from aiohttp.pytest_plugin import AiohttpClient @@ -394,7 +395,7 @@ async def handler(request): assert resp.closed -async def test_close_timeout(aiohttp_client) -> None: +async def test_close_timeout_sock_close_read(aiohttp_client) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) @@ -406,7 +407,39 @@ async def handler(request): app = web.Application() app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", timeout=0.2, autoclose=False) + timeout = ClientWSTimeout(ws_close=0.2) + resp = await client.ws_connect("/", timeout=timeout, autoclose=False) + + await resp.send_bytes(b"ask") + + msg = await resp.receive() + assert msg.data == "test" + assert msg.type == aiohttp.WSMsgType.TEXT + + msg = await resp.close() + assert resp.closed + assert isinstance(resp.exception(), asyncio.TimeoutError) + + +async def test_close_timeout_deprecated(aiohttp_client) -> None: + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive_bytes() + await ws.send_str("test") + await asyncio.sleep(1) + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + client = await aiohttp_client(app) + with pytest.warns( + DeprecationWarning, + match="parameter 'timeout' of type 'float' " + "is deprecated, please use " + r"'timeout=ClientWSTimeout\(ws_close=...\)'", + ): + resp = await client.ws_connect("/", timeout=0.2, autoclose=False) await resp.send_bytes(b"ask") @@ -535,7 +568,7 @@ async def handler(request): await resp.close() -async def test_receive_timeout(aiohttp_client) -> None: +async def test_receive_timeout_sock_read(aiohttp_client) -> None: async def handler(request): ws = web.WebSocketResponse() await ws.prepare(request) @@ -547,10 +580,38 @@ async def 
handler(request): app.router.add_route("GET", "/", handler) client = await aiohttp_client(app) - resp = await client.ws_connect("/", receive_timeout=0.1) + receive_timeout = ClientWSTimeout(ws_receive=0.1) + resp = await client.ws_connect("/", timeout=receive_timeout) with pytest.raises(asyncio.TimeoutError): - await resp.receive(0.05) + await resp.receive(timeout=0.05) + + await resp.close() + + +async def test_receive_timeout_deprecation(aiohttp_client) -> None: + + async def handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + await ws.receive() + await ws.close() + return ws + + app = web.Application() + app.router.add_route("GET", "/", handler) + + client = await aiohttp_client(app) + with pytest.warns( + DeprecationWarning, + match="float parameter 'receive_timeout' " + "is deprecated, please use parameter " + r"'timeout=ClientWSTimeout\(ws_receive=...\)'", + ): + resp = await client.ws_connect("/", receive_timeout=0.1) + + with pytest.raises(asyncio.TimeoutError): + await resp.receive(timeout=0.05) await resp.close() diff --git a/tests/test_connector.py b/tests/test_connector.py index 8dd7a294b30..0129f0cc330 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -9,7 +9,7 @@ import sys import uuid from collections import deque -from contextlib import closing +from contextlib import closing, suppress from typing import Any, List, Optional, Type from unittest import mock @@ -1667,7 +1667,41 @@ async def test_close_cancels_cleanup_handle(loop) -> None: assert conn._cleanup_handle is None -async def test_close_abort_closed_transports(loop) -> None: +async def test_close_cancels_resolve_host(loop: asyncio.AbstractEventLoop) -> None: + cancelled = False + + async def delay_resolve_host(*args: object) -> None: + """Delay _resolve_host() task in order to test cancellation.""" + nonlocal cancelled + try: + await asyncio.sleep(10) + except asyncio.CancelledError: + cancelled = True + raise + + conn = aiohttp.TCPConnector() + 
req = ClientRequest( + "GET", URL("http://localhost:80"), loop=loop, response_class=mock.Mock() + ) + with mock.patch.object(conn, "_resolve_host_with_throttle", delay_resolve_host): + t = asyncio.create_task(conn.connect(req, [], ClientTimeout())) + # Let it create the internal task + await asyncio.sleep(0) + # Let that task start running + await asyncio.sleep(0) + + # We now have a task being tracked and can ensure that .close() cancels it. + assert len(conn._resolve_host_tasks) == 1 + await conn.close() + await asyncio.sleep(0.01) + assert cancelled + assert len(conn._resolve_host_tasks) == 0 + + with suppress(asyncio.CancelledError): + await t + + +async def test_close_abort_closed_transports(loop: asyncio.AbstractEventLoop) -> None: tr = mock.Mock() conn = aiohttp.BaseConnector(loop=loop) diff --git a/tests/test_formdata.py b/tests/test_formdata.py index 4bb8aa07587..db1a3861c56 100644 --- a/tests/test_formdata.py +++ b/tests/test_formdata.py @@ -1,3 +1,4 @@ +import io from unittest import mock import pytest @@ -46,6 +47,16 @@ def test_invalid_formdata_params2() -> None: FormData("as") # 2-char str is not allowed +async def test_formdata_textio_charset(buf: bytearray, writer) -> None: + form = FormData() + body = io.TextIOWrapper(io.BytesIO(b"\xe6\x97\xa5\xe6\x9c\xac"), encoding="utf-8") + form.add_field("foo", body, content_type="text/plain; charset=shift-jis") + payload = form() + await payload.write(writer) + assert b"charset=shift-jis" in buf + assert b"\x93\xfa\x96{" in buf + + def test_invalid_formdata_content_type() -> None: form = FormData() invalid_vals = [0, 0.1, {}, [], b"foo"] diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 67af32dc3be..656364f43aa 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -189,6 +189,14 @@ def test_basic_auth_from_url() -> None: assert auth.password == "pass" +def test_basic_auth_no_user_from_url() -> None: + url = URL("http://:pass@example.com") + auth = helpers.BasicAuth.from_url(url) + 
assert auth is not None + assert auth.login == "" + assert auth.password == "pass" + + def test_basic_auth_from_not_url() -> None: with pytest.raises(TypeError): helpers.BasicAuth.from_url("http://user:pass@example.com") @@ -317,7 +325,8 @@ def test_ipv6_addresses() -> None: def test_host_addresses() -> None: hosts = [ - "www.four.part.host" "www.python.org", + "www.four.part.host", + "www.python.org", "foo.bar", "localhost", ] diff --git a/tests/test_http_exceptions.py b/tests/test_http_exceptions.py index 24944d9fc4e..cd3b08f59db 100644 --- a/tests/test_http_exceptions.py +++ b/tests/test_http_exceptions.py @@ -81,7 +81,7 @@ def test_pickle(self) -> None: pickled = pickle.dumps(err, proto) err2 = pickle.loads(pickled) assert err2.code == 400 - assert err2.message == ("Got more than 10 bytes (12) " "when reading spam.") + assert err2.message == ("Got more than 10 bytes (12) when reading spam.") assert err2.headers is None assert err2.foo == "bar" diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index 74700df4253..75276df1a07 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -84,6 +84,7 @@ def response(loop: Any, protocol: Any, request: Any): max_line_size=8190, max_headers=32768, max_field_size=8190, + read_until_eof=True, ) @@ -442,49 +443,49 @@ def test_conn_default_1_1(parser) -> None: def test_conn_close(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"connection: close\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_close_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: close\r\n\r\n" + text = b"GET /test HTTP/1.0\r\nconnection: close\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_keep_alive_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: keep-alive\r\n\r\n" + text = b"GET 
/test HTTP/1.0\r\nconnection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_keep_alive_1_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"connection: keep-alive\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: keep-alive\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_conn_other_1_0(parser) -> None: - text = b"GET /test HTTP/1.0\r\n" b"connection: test\r\n\r\n" + text = b"GET /test HTTP/1.0\r\nconnection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.should_close def test_conn_other_1_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"connection: test\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nconnection: test\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert not msg.should_close def test_request_chunked(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert msg.chunked @@ -506,7 +507,7 @@ def test_request_te_chunked_with_content_length(parser: Any) -> None: def test_request_te_chunked123(parser: Any) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked123\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked123\r\n\r\n" with pytest.raises( http_exceptions.BadHttpMessage, match="Request has invalid `Transfer-Encoding`", @@ -514,6 +515,23 @@ def test_request_te_chunked123(parser: Any) -> None: parser.feed_data(text) +async def test_request_te_last_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + assert 
await messages[0][1].read() == b"Test" + + +def test_request_te_first_chunked(parser: Any) -> None: + text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked, not\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + with pytest.raises( + http_exceptions.BadHttpMessage, + match="nvalid `Transfer-Encoding`", + ): + parser.feed_data(text) + + def test_conn_upgrade(parser: Any) -> None: text = ( b"GET /test HTTP/1.1\r\n" @@ -537,21 +555,21 @@ def test_bad_upgrade(parser) -> None: def test_compression_empty(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: \r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: \r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_compression_deflate(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: deflate\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: deflate\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "deflate" def test_compression_gzip(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: gzip\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: gzip\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "gzip" @@ -559,21 +577,21 @@ def test_compression_gzip(parser) -> None: @pytest.mark.skipif(brotli is None, reason="brotli is not installed") def test_compression_brotli(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: br\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: br\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] assert msg.compression == "br" def test_compression_unknown(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-encoding: compress\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n" messages, upgrade, tail 
= parser.feed_data(text) msg = messages[0][0] assert msg.compression is None def test_url_connect(parser: Any) -> None: - text = b"CONNECT www.google.com HTTP/1.1\r\n" b"content-length: 0\r\n\r\n" + text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 0\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert upgrade @@ -581,7 +599,7 @@ def test_url_connect(parser: Any) -> None: def test_headers_connect(parser: Any) -> None: - text = b"CONNECT www.google.com HTTP/1.1\r\n" b"content-length: 0\r\n\r\n" + text = b"CONNECT www.google.com HTTP/1.1\r\ncontent-length: 0\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg, payload = messages[0] assert upgrade @@ -601,21 +619,21 @@ def test_url_absolute(parser: Any) -> None: def test_headers_old_websocket_key1(parser: Any) -> None: - text = b"GET /test HTTP/1.1\r\n" b"SEC-WEBSOCKET-KEY1: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\nSEC-WEBSOCKET-KEY1: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_1(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-length: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-length: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_headers_content_length_err_2(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"content-length: -1\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ncontent-length: -1\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) @@ -638,7 +656,7 @@ def test_headers_content_length_err_2(parser) -> None: @pytest.mark.parametrize("pad2", _pad.keys(), ids=["post-" + n for n in _pad.values()]) @pytest.mark.parametrize("pad1", _pad.keys(), ids=["pre-" + n for n in _pad.values()]) def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> None: - text = b"GET /test HTTP/1.1\r\n" b"%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) + text = b"GET 
/test HTTP/1.1\r\n%s%s%s: value\r\n\r\n" % (pad1, hdr, pad2) expectation = pytest.raises(http_exceptions.BadHttpMessage) if pad1 == pad2 == b"" and hdr != b"": # one entry in param matrix is correct: non-empty name, not padded @@ -648,19 +666,19 @@ def test_invalid_header_spacing(parser, pad1: bytes, pad2: bytes, hdr: bytes) -> def test_empty_header_name(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b":test\r\n\r\n" + text = b"GET /test HTTP/1.1\r\n:test\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_header(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"test line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntest line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) def test_invalid_name(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"test[]: line\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntest[]: line\r\n\r\n" with pytest.raises(http_exceptions.BadHttpMessage): parser.feed_data(text) @@ -697,7 +715,7 @@ def test_max_header_field_size_under_limit(parser) -> None: @pytest.mark.parametrize("size", [40960, 8191]) def test_max_header_value_size(parser, size) -> None: name = b"t" * size - text = b"GET /test HTTP/1.1\r\n" b"data:" + name + b"\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ndata:" + name + b"\r\n\r\n" match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): @@ -706,7 +724,7 @@ def test_max_header_value_size(parser, size) -> None: def test_max_header_value_size_under_limit(parser) -> None: value = b"A" * 8190 - text = b"GET /test HTTP/1.1\r\n" b"data:" + value + b"\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ndata:" + value + b"\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) msg = messages[0][0] @@ -1154,6 +1172,23 @@ async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None response.feed_data(text) +async def 
test_http_response_parser_notchunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: notchunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + response.feed_eof() + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + + +async def test_http_response_parser_last_chunked(response) -> None: + text = b"HTTP/1.1 200 OK\r\nTransfer-Encoding: not, chunked\r\n\r\n1\r\nT\r\n3\r\nest\r\n0\r\n\r\n" + messages, upgrade, tail = response.feed_data(text) + + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + assert await messages[0][1].read() == b"Test" + + def test_http_response_parser_bad(response) -> None: with pytest.raises(http_exceptions.BadHttpMessage): response.feed_data(b"HTT/1\r\n\r\n") @@ -1181,7 +1216,7 @@ def test_http_response_parser_code_not_ascii(response, nonascii_digit: bytes) -> def test_http_request_chunked_payload(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] assert msg.chunked @@ -1196,7 +1231,7 @@ def test_http_request_chunked_payload(parser) -> None: def test_http_request_chunked_payload_and_next_message(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] messages, upgraded, tail = parser.feed_data( @@ -1218,7 +1253,7 @@ def test_http_request_chunked_payload_and_next_message(parser) -> None: def test_http_request_chunked_payload_chunks(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b"4\r\ndata\r") @@ -1240,7 +1275,7 @@ def 
test_http_request_chunked_payload_chunks(parser) -> None: def test_parse_chunked_payload_chunk_extension(parser) -> None: - text = b"GET /test HTTP/1.1\r\n" b"transfer-encoding: chunked\r\n\r\n" + text = b"GET /test HTTP/1.1\r\ntransfer-encoding: chunked\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n") @@ -1260,14 +1295,14 @@ def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: An def test_parse_payload_response_without_body(loop, protocol, response_cls) -> None: parser = response_cls(protocol, loop, 2**16, response_with_body=False) - text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 10\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ncontent-length: 10\r\n\r\n" msg, payload = parser.feed_data(text)[0][0] assert payload.is_eof() def test_parse_length_payload(response) -> None: - text = b"HTTP/1.1 200 Ok\r\n" b"content-length: 4\r\n\r\n" + text = b"HTTP/1.1 200 Ok\r\ncontent-length: 4\r\n\r\n" msg, payload = response.feed_data(text)[0][0] assert not payload.is_eof() @@ -1592,7 +1627,7 @@ async def test_parse_chunked_payload_split_end_trailers3(self, protocol) -> None async def test_parse_chunked_payload_split_end_trailers4(self, protocol) -> None: out = aiohttp.StreamReader(protocol, 2**16, loop=None) p = HttpPayloadParser(out, chunked=True) - p.feed_data(b"4\r\nasdf\r\n0\r\n" b"C") + p.feed_data(b"4\r\nasdf\r\n0\r\nC") p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n") assert out.is_eof() diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index 5649f32f792..db50ad65f67 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -108,7 +108,7 @@ async def test_write_payload_chunked_filter_mutiple_chunks(protocol, transport, await msg.write_eof() content = b"".join([c[1][0] for c in list(write.mock_calls)]) assert content.endswith( - b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n" b"2\r\na2\r\n0\r\n\r\n" + 
b"2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n2\r\na2\r\n0\r\n\r\n" ) @@ -136,7 +136,7 @@ async def test_write_payload_deflate_and_chunked(buf, protocol, transport, loop) await msg.write(b"ta") await msg.write_eof() - thing = b"2\r\nx\x9c\r\n" b"a\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n" b"0\r\n\r\n" + thing = b"2\r\nx\x9c\r\na\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n0\r\n\r\n" assert thing == buf @@ -163,8 +163,8 @@ async def test_write_payload_short_ints_memoryview(buf, protocol, transport, loo await msg.write_eof() endians = ( - (b"6\r\n" b"\x00A\x00B\x00C\r\n" b"0\r\n\r\n"), - (b"6\r\n" b"A\x00B\x00C\x00\r\n" b"0\r\n\r\n"), + (b"6\r\n\x00A\x00B\x00C\r\n0\r\n\r\n"), + (b"6\r\nA\x00B\x00C\x00\r\n0\r\n\r\n"), ) assert buf in endians @@ -179,7 +179,7 @@ async def test_write_payload_2d_shape_memoryview(buf, protocol, transport, loop) await msg.write(payload) await msg.write_eof() - thing = b"6\r\n" b"ABCDEF\r\n" b"0\r\n\r\n" + thing = b"6\r\nABCDEF\r\n0\r\n\r\n" assert thing == buf diff --git a/tests/test_multipart.py b/tests/test_multipart.py index bbbc1c666ca..8576998962e 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -463,7 +463,7 @@ async def test_read_text_guess_encoding(self) -> None: assert data == result async def test_read_text_compressed(self) -> None: - data = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00" b"%s--:--" % newline + data = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00%s--:--" % newline with Stream(data) as stream: obj = aiohttp.BodyPartReader( BOUNDARY, @@ -515,9 +515,7 @@ async def test_read_json_guess_encoding(self) -> None: assert {"тест": "пассед"} == result async def test_read_json_compressed(self) -> None: - with Stream( - b"\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00" b"%s--:--" % newline - ) as stream: + with Stream(b"\xabV*I-.Q\xb2RP*H,.NMQ\xaa\x05\x00%s--:--" % newline) as stream: obj = aiohttp.BodyPartReader( BOUNDARY, {CONTENT_ENCODING: "deflate", CONTENT_TYPE: "application/json"}, @@ -712,7 +710,7 @@ def 
test_dispatch_multipart(self) -> None: b"----:--", b"", b"passed", - b"----:----" b"--:--", + b"----:------:--", ] ) ) as stream: diff --git a/tests/test_multipart_helpers.py b/tests/test_multipart_helpers.py index 9516751cba9..d4fb610a22c 100644 --- a/tests/test_multipart_helpers.py +++ b/tests/test_multipart_helpers.py @@ -555,10 +555,10 @@ def test_attfncontqs(self) -> None: def test_attfncontenc(self) -> None: disptype, params = parse_content_disposition( - "attachment; filename*0*=UTF-8" 'foo-%c3%a4; filename*1=".html"' + "attachment; filename*0*=UTF-8" + 'foo-%c3%a4; filename*1=".html"' ) assert "attachment" == disptype - assert {"filename*0*": "UTF-8" "foo-%c3%a4", "filename*1": ".html"} == params + assert {"filename*0*": "UTF-8foo-%c3%a4", "filename*1": ".html"} == params def test_attfncontlz(self) -> None: disptype, params = parse_content_disposition( @@ -590,14 +590,14 @@ def test_attfncontord(self) -> None: def test_attfnboth(self) -> None: disptype, params = parse_content_disposition( - 'attachment; filename="foo-ae.html";' " filename*=UTF-8''foo-%c3%a4.html" + 'attachment; filename="foo-ae.html";' + " filename*=UTF-8''foo-%c3%a4.html" ) assert "attachment" == disptype assert {"filename": "foo-ae.html", "filename*": "foo-ä.html"} == params def test_attfnboth2(self) -> None: disptype, params = parse_content_disposition( - "attachment; filename*=UTF-8''foo-%c3%a4.html;" ' filename="foo-ae.html"' + "attachment; filename*=UTF-8''foo-%c3%a4.html;" + ' filename="foo-ae.html"' ) assert "attachment" == disptype assert {"filename": "foo-ae.html", "filename*": "foo-ä.html"} == params diff --git a/tests/test_payload.py b/tests/test_payload.py index c8681cb5ebe..0e2db91135b 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -17,6 +17,9 @@ def registry(): class Payload(payload.Payload): + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + assert False + async def write(self, writer): pass diff --git a/tests/test_resolver.py 
b/tests/test_resolver.py index f51506a6999..825db81e41b 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -9,7 +9,6 @@ from aiohttp.resolver import ( _NUMERIC_SOCKET_FLAGS, - _SUPPORTS_SCOPE_ID, AsyncResolver, DefaultResolver, ThreadedResolver, @@ -136,9 +135,6 @@ async def test_async_resolver_positive_ipv4_lookup(loop: Any) -> None: @pytest.mark.skipif(not getaddrinfo, reason="aiodns >=3.2.0 required") -@pytest.mark.skipif( - not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" -) async def test_async_resolver_positive_link_local_ipv6_lookup(loop: Any) -> None: with patch("aiodns.DNSResolver") as mock: mock().getaddrinfo.return_value = fake_aiodns_getaddrinfo_ipv6_result( @@ -211,9 +207,6 @@ async def test_threaded_resolver_positive_lookup() -> None: ipaddress.ip_address(real[0]["host"]) -@pytest.mark.skipif( - not _SUPPORTS_SCOPE_ID, reason="python version does not support scope id" -) async def test_threaded_resolver_positive_ipv6_link_local_lookup() -> None: loop = Mock() loop.getaddrinfo = fake_ipv6_addrinfo(["fe80::1"]) diff --git a/tests/test_run_app.py b/tests/test_run_app.py index c1d5f8e14f4..74d8c79bf55 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,7 +9,7 @@ import subprocess import sys import time -from typing import Callable, NoReturn, Set +from typing import AsyncIterator, Callable, NoReturn, Set from unittest import mock from uuid import uuid4 @@ -906,6 +906,23 @@ async def init(): assert count == 3 +def test_run_app_raises_exception(patched_loop: asyncio.AbstractEventLoop) -> None: + async def context(app: web.Application) -> AsyncIterator[None]: + raise RuntimeError("foo") + yield # pragma: no cover + + app = web.Application() + app.cleanup_ctx.append(context) + + with mock.patch.object( + patched_loop, "call_exception_handler", autospec=True, spec_set=True + ) as m: + with pytest.raises(RuntimeError, match="foo"): + web.run_app(app, loop=patched_loop) + + assert not m.called + + class 
TestShutdown: def raiser(self) -> NoReturn: raise KeyboardInterrupt diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index 328f83c3fd4..77349246616 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -239,6 +239,11 @@ def test_make_mocked_request_content() -> None: assert req.content is payload +async def test_make_mocked_request_empty_payload() -> None: + req = make_mocked_request("GET", "/") + assert await req.read() == b"" + + def test_make_mocked_request_transport() -> None: transport = mock.Mock() req = make_mocked_request("GET", "/", transport=transport) diff --git a/tests/test_urldispatch.py b/tests/test_urldispatch.py index 2453ab5a235..d0efa91593e 100644 --- a/tests/test_urldispatch.py +++ b/tests/test_urldispatch.py @@ -368,7 +368,7 @@ def test_add_static_append_version(router) -> None: resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="/data.unknown_mime_type", append_version=True) expect_url = ( - "/st/data.unknown_mime_type?" "v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) @@ -379,7 +379,7 @@ def test_add_static_append_version_set_from_constructor(router) -> None: ) url = resource.url_for(filename="/data.unknown_mime_type") expect_url = ( - "/st/data.unknown_mime_type?" "v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) @@ -397,7 +397,7 @@ def test_add_static_append_version_filename_without_slash(router) -> None: resource = router.add_static("/st", pathlib.Path(__file__).parent, name="static") url = resource.url_for(filename="data.unknown_mime_type", append_version=True) expect_url = ( - "/st/data.unknown_mime_type?" 
"v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" + "/st/data.unknown_mime_type?v=aUsn8CHEhhszc81d28QmlcBW0KQpfS2F4trgQKhOYd8%3D" ) assert expect_url == str(url) @@ -457,7 +457,7 @@ def test_add_static_quoting(router) -> None: ) assert router["static"] is resource url = resource.url_for(filename="/1 2/файл%2F.txt") - assert url.path == "/пре /фикс/1 2/файл%2F.txt" + assert url.path == "/пре %2Fфикс/1 2/файл%2F.txt" assert str(url) == ( "/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%84%D0%B0%D0%B9%D0%BB%252F.txt" @@ -530,19 +530,24 @@ def test_static_remove_trailing_slash(router) -> None: assert "/prefix" == route._prefix -async def test_add_route_with_re(router) -> None: +@pytest.mark.parametrize( + "pattern,url,expected", + ( + (r"{to:\d+}", r"1234", {"to": "1234"}), + ("{name}.html", "test.html", {"name": "test"}), + (r"{fn:\w+ \d+}", "abc 123", {"fn": "abc 123"}), + (r"{fn:\w+\s\d+}", "abc 123", {"fn": "abc 123"}), + ), +) +async def test_add_route_with_re( + router: web.UrlDispatcher, pattern: str, url: str, expected +) -> None: handler = make_handler() - router.add_route("GET", r"/handler/{to:\d+}", handler) - - req = make_mocked_request("GET", "/handler/1234") + router.add_route("GET", f"/handler/{pattern}", handler) + req = make_mocked_request("GET", f"/handler/{url}") info = await router.resolve(req) assert info is not None - assert {"to": "1234"} == info - - router.add_route("GET", r"/handler/{name}.html", handler) - req = make_mocked_request("GET", "/handler/test.html") - info = await router.resolve(req) - assert {"name": "test"} == info + assert info == expected async def test_add_route_with_re_and_slashes(router) -> None: @@ -625,7 +630,7 @@ def test_route_dynamic_quoting(router) -> None: route = router.add_route("GET", r"/пре %2Fфикс/{arg}", handler) url = route.url_for(arg="1 2/текст%2F") - assert url.path == "/пре /фикс/1 2/текст%2F" + assert url.path == "/пре %2Fфикс/1 2/текст%2F" assert str(url) == ( 
"/%D0%BF%D1%80%D0%B5%20%2F%D1%84%D0%B8%D0%BA%D1%81" "/1%202/%D1%82%D0%B5%D0%BA%D1%81%D1%82%252F" diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 3d3aa2479f6..6a86a3458a3 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -1,6 +1,6 @@ import asyncio import gc -from typing import AsyncIterator, Callable, Iterator, NoReturn +from typing import AsyncIterator, Callable, Iterator, NoReturn, Type from unittest import mock import pytest @@ -476,7 +476,10 @@ async def fail_ctx(app: web.Application) -> AsyncIterator[NoReturn]: assert ctx_state == "CLEAN" -async def test_cleanup_ctx_exception_on_cleanup_multiple() -> None: +@pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError)) +async def test_cleanup_ctx_exception_on_cleanup_multiple( + exc_cls: Type[BaseException], +) -> None: app = web.Application() out = [] @@ -488,7 +491,7 @@ async def inner(app: web.Application) -> AsyncIterator[None]: yield None out.append("post_" + str(num)) if fail: - raise Exception("fail_" + str(num)) + raise exc_cls("fail_" + str(num)) return inner diff --git a/tests/test_web_cli.py b/tests/test_web_cli.py index 12a01dff577..381aaf6cd82 100644 --- a/tests/test_web_cli.py +++ b/tests/test_web_cli.py @@ -90,7 +90,7 @@ def test_path_when_unsupported(mocker, monkeypatch) -> None: web.main(argv) error.assert_called_with( - "file system paths not supported by your" " operating environment" + "file system paths not supported by your operating environment" ) @@ -107,7 +107,7 @@ def test_entry_func_call(mocker) -> None: web.main(argv) module.func.assert_called_with( - ("--extra-optional-eins --extra-optional-zwei extra positional " "args").split() + ("--extra-optional-eins --extra-optional-zwei extra positional args").split() ) diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index ee61537068b..ad9e7c288fc 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -4,7 +4,7 @@ import pathlib import socket import 
zlib -from typing import Any, Optional +from typing import Any, NoReturn, Optional from unittest import mock import pytest @@ -101,12 +101,8 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") - - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + async with client.get("/") as resp: + assert resp.status == 500 async def test_handler_returns_none(aiohttp_server, aiohttp_client) -> None: @@ -121,13 +117,22 @@ async def handler(request): server = await aiohttp_server(app, logger=logger) client = await aiohttp_client(server) - with pytest.raises(aiohttp.ServerDisconnectedError): - await client.get("/") + async with client.get("/") as resp: + assert resp.status == 500 - # Actual error text is placed in exc_info - logger.exception.assert_called_with( - "Unhandled runtime exception", exc_info=mock.ANY - ) + +async def test_handler_returns_not_response_after_100expect( + aiohttp_server, aiohttp_client +) -> None: + async def handler(request: web.Request) -> NoReturn: + raise Exception("foo") + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/", expect100=True) as resp: + assert resp.status == 500 async def test_head_returns_empty_body(aiohttp_client) -> None: @@ -148,6 +153,21 @@ async def handler(request): assert resp.headers["Content-Length"] == "4" +@pytest.mark.parametrize("status", (201, 204, 404)) +async def test_default_content_type_no_body(aiohttp_client: Any, status: int) -> None: + async def handler(request): + return web.Response(status=status) + + app = web.Application() + app.router.add_get("/", handler) + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.status == status + assert await resp.read() == b"" + assert "Content-Type" not in resp.headers + + async 
def test_response_before_complete(aiohttp_client: Any) -> None: async def handler(request): return web.Response(body=b"OK") @@ -1631,9 +1651,7 @@ async def handler(request): resp = await client.post("/", data=data) assert 413 == resp.status resp_text = await resp.text() - assert ( - "Maximum request body size 1048576 exceeded, " "actual body size" in resp_text - ) + assert "Maximum request body size 1048576 exceeded, actual body size" in resp_text # Maximum request body size X exceeded, actual body size X body_size = int(resp_text.split()[-1]) assert body_size >= max_size @@ -1665,9 +1683,7 @@ async def handler(request): resp = await client.post("/", data=too_large_data) assert 413 == resp.status resp_text = await resp.text() - assert ( - "Maximum request body size 2097152 exceeded, " "actual body size" in resp_text - ) + assert "Maximum request body size 2097152 exceeded, actual body size" in resp_text # Maximum request body size X exceeded, actual body size X body_size = int(resp_text.split()[-1]) assert body_size >= custom_max_size diff --git a/tests/test_web_request.py b/tests/test_web_request.py index c6398ac1836..ff22e19d5b4 100644 --- a/tests/test_web_request.py +++ b/tests/test_web_request.py @@ -169,6 +169,22 @@ def test_absolute_url() -> None: assert req.rel_url == URL.build(path="/path/to", query={"a": "1"}) +def test_clone_absolute_scheme() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.scheme == "https" + req2 = req.clone(scheme="http") + assert req2.scheme == "http" + assert req2.url.scheme == "http" + + +def test_clone_absolute_host() -> None: + req = make_mocked_request("GET", "https://example.com/path/to?a=1") + assert req.host == "example.com" + req2 = req.clone(host="foo.test") + assert req2.host == "foo.test" + assert req2.url.host == "foo.test" + + def test_content_length() -> None: req = make_mocked_request("Get", "/", CIMultiDict([("CONTENT-LENGTH", "123")])) @@ -612,7 +628,7 @@ async def 
test_multipart_formdata(protocol) -> None: b"-----------------------------326931944431359--\r\n" ) content_type = ( - "multipart/form-data; boundary=" "---------------------------326931944431359" + "multipart/form-data; boundary=---------------------------326931944431359" ) payload.feed_eof() req = make_mocked_request( @@ -633,7 +649,7 @@ async def test_multipart_formdata_file(protocol) -> None: b"-----------------------------326931944431359--\r\n" ) content_type = ( - "multipart/form-data; boundary=" "---------------------------326931944431359" + "multipart/form-data; boundary=---------------------------326931944431359" ) payload.feed_eof() req = make_mocked_request( @@ -684,18 +700,23 @@ def test_save_state_on_clone() -> None: def test_clone_scheme() -> None: req = make_mocked_request("GET", "/") + assert req.scheme == "http" req2 = req.clone(scheme="https") assert req2.scheme == "https" + assert req2.url.scheme == "https" def test_clone_host() -> None: req = make_mocked_request("GET", "/") + assert req.host != "example.com" req2 = req.clone(host="example.com") assert req2.host == "example.com" + assert req2.url.host == "example.com" def test_clone_remote() -> None: req = make_mocked_request("GET", "/") + assert req.remote != "11.11.11.11" req2 = req.clone(remote="11.11.11.11") assert req2.remote == "11.11.11.11" diff --git a/tests/test_web_response.py b/tests/test_web_response.py index d1b407c090c..2e1e332e0a5 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1,8 +1,10 @@ import collections.abc import datetime import gzip +import io import json from concurrent.futures import ThreadPoolExecutor +from typing import AsyncIterator, Optional from unittest import mock import aiosignal @@ -13,7 +15,8 @@ from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs from aiohttp.helpers import ETag from aiohttp.http_writer import StreamWriter, _serialize_headers -from aiohttp.payload import BytesPayload +from aiohttp.multipart import 
BodyPartReader, MultipartWriter +from aiohttp.payload import BytesPayload, StringPayload from aiohttp.test_utils import make_mocked_coro, make_mocked_request from aiohttp.web import ContentCoding, Response, StreamResponse, json_response @@ -773,11 +776,8 @@ async def test___repr___after_eof() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) - assert resp.prepared - await resp.write(b"data") await resp.write_eof() - assert not resp.prepared resp_repr = repr(resp) assert resp_repr == "" @@ -1122,6 +1122,48 @@ def test_assign_nonstr_text() -> None: assert 4 == resp.content_length +mpwriter = MultipartWriter(boundary="x") +mpwriter.append_payload(StringPayload("test")) + + +async def async_iter() -> AsyncIterator[str]: + yield "foo" # pragma: no cover + + +class CustomIO(io.IOBase): + def __init__(self): + self._lines = [b"", b"", b"test"] + + def read(self, size: int = -1) -> bytes: + return self._lines.pop() + + +@pytest.mark.parametrize( + "payload,expected", + ( + ("test", "test"), + (CustomIO(), "test"), + (io.StringIO("test"), "test"), + (io.TextIOWrapper(io.BytesIO(b"test")), "test"), + (io.BytesIO(b"test"), "test"), + (io.BufferedReader(io.BytesIO(b"test")), "test"), + (async_iter(), None), + (BodyPartReader("x", CIMultiDictProxy(CIMultiDict()), mock.Mock()), None), + ( + mpwriter, + "--x\nContent-Type: text/plain; charset=utf-8\r\nContent-Length: 4\r\n\r\ntest", + ), + ), +) +def test_payload_body_get_text(payload, expected: Optional[str]) -> None: + resp = Response(body=payload) + if expected is None: + with pytest.raises(TypeError): + resp.text + else: + assert resp.text == expected + + def test_response_set_content_length() -> None: resp = Response() with pytest.raises(RuntimeError): @@ -1139,7 +1181,6 @@ async def test_send_headers_for_empty_body(buf, writer) -> None: Matches( "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) @@ -1182,7 +1223,6 @@ 
async def test_send_set_cookie_header(buf, writer) -> None: "HTTP/1.1 200 OK\r\n" "Content-Length: 0\r\n" "Set-Cookie: name=value\r\n" - "Content-Type: application/octet-stream\r\n" "Date: .+\r\n" "Server: .+\r\n\r\n" ) @@ -1245,17 +1285,25 @@ def test_content_type_with_set_body() -> None: assert resp.content_type == "application/octet-stream" -def test_started_when_not_started() -> None: +def test_prepared_when_not_started() -> None: resp = StreamResponse() assert not resp.prepared -async def test_started_when_started() -> None: +async def test_prepared_when_started() -> None: resp = StreamResponse() await resp.prepare(make_request("GET", "/")) assert resp.prepared +async def test_prepared_after_eof() -> None: + resp = StreamResponse() + await resp.prepare(make_request("GET", "/")) + await resp.write(b"data") + await resp.write_eof() + assert resp.prepared + + async def test_drain_before_start() -> None: resp = StreamResponse() with pytest.raises(AssertionError): diff --git a/tests/test_web_runner.py b/tests/test_web_runner.py index c7c94263234..b71c34fe912 100644 --- a/tests/test_web_runner.py +++ b/tests/test_web_runner.py @@ -165,6 +165,13 @@ async def mock_create_server(*args, **kwargs): assert port == 8080 +async def test_tcpsite_empty_str_host(make_runner: Any) -> None: + runner = make_runner() + await runner.setup() + site = web.TCPSite(runner, host="") + assert site.name == "http://0.0.0.0:8080" + + def test_run_after_asyncio_run() -> None: async def nothing(): pass diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 3a45b9355f5..8a97acf504d 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -856,18 +856,15 @@ async def get_foobar(request: web.Request) -> web.Response: assert (await resp.text()) == "success!" 
-@pytest.mark.xfail( - raises=AssertionError, - reason="Regression in v3.7: https://github.com/aio-libs/aiohttp/issues/5621", -) @pytest.mark.parametrize( ("route_definition", "urlencoded_path", "expected_http_resp_status"), ( ("/467,802,24834/hello", "/467%2C802%2C24834/hello", 200), ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467%2C802%2C24834/hello", 200), + ("/467,802,24834/hello", "/467,802,24834/hello", 200), + ("/{user_ids:([0-9]+)(,([0-9]+))*}/hello", "/467,802,24834/hello", 200), ("/1%2C3/hello", "/1%2C3/hello", 404), ), - ids=("urldecoded_route", "urldecoded_route_with_regex", "urlencoded_route"), ) async def test_decoded_url_match( aiohttp_client: AiohttpClient, @@ -883,9 +880,8 @@ async def handler(request: web.Request) -> web.Response: app.router.add_get(route_definition, handler) client = await aiohttp_client(app) - r = await client.get(yarl.URL(urlencoded_path, encoded=True)) - assert r.status == expected_http_resp_status - await r.release() + async with client.get(yarl.URL(urlencoded_path, encoded=True)) as resp: + assert resp.status == expected_http_resp_status async def test_order_is_preserved(aiohttp_client: AiohttpClient) -> None: diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index 2be54486ee9..0ebd41db502 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -273,7 +273,7 @@ async def handler(request): await asyncio.sleep(0.08) assert await aborted - assert elapsed < 0.25, "close() should have returned before " "at most 2x timeout." + assert elapsed < 0.25, "close() should have returned before at most 2x timeout." 
await ws.close() diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 3bdd8108e35..13b46803a76 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -382,7 +382,7 @@ def test_continuation_with_close_empty(out, parser) -> None: websocket_mask_data = b"some very long data for masking by websocket" websocket_mask_mask = b"1234" websocket_mask_masked = ( - b"B]^Q\x11DVFH\x12_[_U\x13PPFR\x14W]A\x14\\S@_X" b"\\T\x14SK\x13CTP@[RYV@" + b"B]^Q\x11DVFH\x12_[_U\x13PPFR\x14W]A\x14\\S@_X\\T\x14SK\x13CTP@[RYV@" )