From 83315ef51ebcf40c353cf9cbff1b366e27d5ea4f Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 17 Oct 2025 15:59:33 +0100 Subject: [PATCH 01/21] Bump version (#11685) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 017c7a20525..443b05ff275 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.1" +__version__ = "3.13.1.dev0" from typing import TYPE_CHECKING, Tuple From 381334b1729d644f11c60b59d18352e586f96497 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 18 Oct 2025 19:26:43 +0000 Subject: [PATCH 02/21] [PR #11686/42fc48a6 backport][3.14] Raise benchmark timeout to 12 minutes (#11688) Co-authored-by: J. Nick Koston --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index ec834c6e843..881bc61cf3f 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -246,7 +246,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 9 + timeout-minutes: 12 steps: - name: Checkout project uses: actions/checkout@v5 From e6042d2e1248990592cc7e1b4b5dfd2c8da87d2f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Sat, 18 Oct 2025 19:45:34 +0000 Subject: [PATCH 03/21] [PR #11686/42fc48a6 backport][3.13] Raise benchmark timeout to 12 minutes (#11687) Co-authored-by: J. Nick Koston --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 05653d9c028..e8f51219d03 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -246,7 +246,7 @@ jobs: needs: gen_llhttp runs-on: ubuntu-latest - timeout-minutes: 9 + timeout-minutes: 12 steps: - name: Checkout project uses: actions/checkout@v5 From f401e999080cc19948c54a25ae6408fac0422c32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 12:00:52 +0000 Subject: [PATCH 04/21] Bump iniconfig from 2.1.0 to 2.3.0 (#11692) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [iniconfig](https://github.com/pytest-dev/iniconfig) from 2.1.0 to 2.3.0.
Release notes

Sourced from iniconfig's releases.

Version 2.3.0

What's Changed

Full Changelog: https://github.com/pytest-dev/iniconfig/compare/v2.2.0...v2.3.0

Version 2.2.0

No release notes provided.

Changelog

Sourced from iniconfig's changelog.

2.3.0

  • add IniConfig.parse() classmethod with strip_inline_comments parameter (fixes #55; see the sketch after this list)
    • by default (strip_inline_comments=True), inline comments are properly stripped from values
    • set strip_inline_comments=False to preserve old behavior if needed
  • IniConfig() constructor maintains backward compatibility (does not strip inline comments)
  • users should migrate to IniConfig.parse() for correct comment handling
  • add strip_section_whitespace parameter to IniConfig.parse() (regarding #4)
    • opt-in parameter to strip Unicode whitespace from section names
    • when True, strips Unicode whitespace (U+00A0, U+2000, U+3000, etc.) from section names
    • when False (default), preserves existing behavior for backward compatibility
  • clarify Unicode whitespace handling (regarding #4)
    • since iniconfig 2.0.0 (Python 3 only), all strings are Unicode by default
    • Python 3's str.strip() has handled Unicode whitespace since Python 3.0 (2008)
    • iniconfig automatically benefits from this in all supported versions (Python >= 3.10)
    • key names and values have Unicode whitespace properly stripped using Python's built-in methods
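
A minimal sketch of the new entry points described above, assuming iniconfig 2.3.0; the file name and contents are illustrative, and the parse() call signature is inferred from the changelog entries rather than from iniconfig's documentation:

```python
# A minimal sketch, assuming iniconfig 2.3.0. The file name and contents are
# illustrative; the parse() arguments are inferred from the changelog above.
from iniconfig import IniConfig

with open("example.ini", "w") as f:
    f.write("[section]\nkey = value  ; inline comment\n")

cfg = IniConfig.parse("example.ini")  # strip_inline_comments=True by default
print(cfg["section"]["key"])          # -> "value" (comment stripped)

legacy = IniConfig("example.ini")     # constructor keeps the old behavior
print(legacy["section"]["key"])       # -> "value  ; inline comment"
```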

2.2.0

  • drop Python 3.8 and 3.9 support (now requires Python >= 3.10)
  • add Python 3.14 classifier
  • migrate from hatchling to setuptools 77 with setuptools_scm
  • adopt PEP 639 license specifiers and PEP 740 build attestations
  • migrate from black + pyupgrade to ruff
  • migrate CI to uv and unified test workflow
  • automate GitHub releases and PyPI publishing via Trusted Publishing
  • include tests in sdist
  • modernize code for Python 3.10+ (remove future annotations, TYPE_CHECKING guards)
  • rename _ParsedLine to ParsedLine
Commits
  • 7faed13 Merge pull request #70 from RonnyPfannschmidt/comments
  • 58c0869 Refactor: Simplify IniConfig constructor and parse() method
  • 6d0af45 Add strip_section_whitespace parameter to address issue #4
  • e2d89f5 Add IniConfig.parse() classmethod to fix inline comment handling
  • 57b7ed9 Merge pull request #66 from killiandesse/pep639
  • 27ac49f Merge pull request #69 from RonnyPfannschmidt/limit-attestation
  • 3402322 Disable build attestations for PRs from forks
  • 27e6a7b Merge branch 'main' into pep639
  • 6522881 Merge pull request #68 from pytest-dev/fix-build
  • 8b2bccb Update CHANGELOG and automate releases
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=iniconfig&package-manager=pip&previous-version=2.1.0&new-version=2.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index c86f076606b..68ac13c2813 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -94,7 +94,7 @@ idna==3.10 # yarl imagesize==1.4.1 # via sphinx -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 24e94a7cc4e..47b51623384 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -92,7 +92,7 @@ idna==3.10 # yarl imagesize==1.4.1 # via sphinx -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via diff --git a/requirements/lint.txt b/requirements/lint.txt index be80aa83074..2c38646da0e 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -39,7 +39,7 @@ identify==2.6.15 # via pre-commit idna==3.10 # via trustme -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.7.2 # via -r requirements/lint.in diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 0146b312858..4b1f21f4ed1 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -30,7 +30,7 @@ freezegun==1.5.5 # via -r requirements/test-common.in idna==3.10 # via trustme -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 8ab031bf707..b1da6d21c94 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -53,7 +53,7 @@ idna==3.10 # via # trustme # yarl -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.8.0 ; python_version < "3.14" # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 86d0a85d965..1b7fd2c325b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -53,7 +53,7 @@ idna==3.10 # via # trustme # yarl -iniconfig==2.1.0 +iniconfig==2.3.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in From 95292793946002ab62b3e95efb25b96252aab980 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 12:09:01 +0000 Subject: [PATCH 05/21] Bump pydantic from 2.12.2 to 2.12.3 (#11693) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.12.2 to 2.12.3.
Release notes

Sourced from pydantic's releases.

v2.12.3 2025-10-17

v2.12.3 (2025-10-17)

What's Changed

This is the third 2.12 patch release, fixing issues related to the FieldInfo class and reverting a change to the supported after model validator function signatures.

  • Raise a warning when an invalid after model validator function signature is used, by @Viicos in #12414. Starting in 2.12.0, using class methods for after model validators raised an error, but the error wasn't raised consistently. We decided to emit a deprecation warning instead (see the sketch after this list).
  • Add FieldInfo.asdict() method, improve documentation around FieldInfo, by @Viicos in #12411. This also adds back support for mutations on FieldInfo instances that are reused as Annotated metadata. However, note that this is still not a supported pattern. Instead, please refer to the added example in the documentation.
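
A minimal sketch of the two changes above, assuming pydantic 2.12.3; the model and field names are illustrative, and FieldInfo.asdict() is taken from these notes rather than from independently verified API docs:

```python
# A minimal sketch, assuming pydantic 2.12.3. The instance-method "after"
# validator below is the supported signature; declaring it as a classmethod
# now emits a deprecation warning instead of an inconsistently raised error.
from pydantic import BaseModel, model_validator

class User(BaseModel):
    name: str

    @model_validator(mode="after")
    def strip_name(self) -> "User":
        self.name = self.name.strip()
        return self

# FieldInfo.asdict() is new in 2.12.3 per the notes above; model_fields
# exposes the FieldInfo instance attached to each field.
print(User.model_fields["name"].asdict())
```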

The blog post section on changes was also updated to document the changes related to serialize_as_any.

Full Changelog: https://github.com/pydantic/pydantic/compare/v2.12.2...v2.12.3

Commits
  • 1a8850d Prepare release 2.12.3
  • 09dbcf2 Add FieldInfo.asdict() method, improve documentation around FieldInfo
  • 5da4331 Improve documentation about serialize as any behavior
  • 9c86324 Raise a warning when an invalid after model validator function signature is r...
  • 36a73c6 Update pydantic-extra-types dependency to version >=2.10.6
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.12.2&new-version=2.12.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 68ac13c2813..73db32d0fff 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -153,7 +153,7 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic diff --git a/requirements/dev.txt b/requirements/dev.txt index 47b51623384..c93523e0b03 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -150,7 +150,7 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic diff --git a/requirements/lint.txt b/requirements/lint.txt index 2c38646da0e..1a34a990539 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -67,7 +67,7 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 4b1f21f4ed1..2eef6a22c0f 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -56,7 +56,7 @@ proxy-py==2.4.10 # via -r requirements/test-common.in pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index b1da6d21c94..c99bea5ef63 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -91,7 +91,7 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic diff --git a/requirements/test.txt b/requirements/test.txt index 1b7fd2c325b..a7a1104c557 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -91,7 +91,7 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.2 +pydantic==2.12.3 # via python-on-whales pydantic-core==2.41.4 # via pydantic From 0354bf1b5b7848c26193ad84e93afcb2835f8bbd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 12:16:03 +0000 Subject: [PATCH 06/21] Bump cython from 3.1.4 to 3.1.5 (#11694) Bumps [cython](https://github.com/cython/cython) from 3.1.4 to 3.1.5.
Release notes

Sourced from cython's releases.

3.1.5 (2025-10-19)

Bugs fixed

  • Conversion from C++ strings longer than PY_SSIZE_T_MAX did not validate the length.

  • Some non-Limited API code was incorrectly used in generated header files. (GitHub issue #7157)

  • Optimised unpacking of Python integers in expressions uses a slightly safer scheme. (GitHub issue #7134)

  • Empty return statements were not always reported when tracing. (GitHub issue #7022)

  • Value conversion errors when tracing C return statements no longer fail the trace but fall back to reporting None returns instead. (GitHub issue #6503)

Commits
  • 24eb0d5 Build: Move release step to separate job to enforce complete releases and cut...
  • 2dc58ee Update changelog.
  • 5f1fa05 Fix CI for Python 3.14.0 (GH-7197)
  • acc3d7c Adapt some tests to different error messages in Py3.15.
  • 9143822 Replace outdated "codecs.open()" usage with just "open()".
  • bbe4838 Prepare release of 3.1.5.
  • 3bc285d Upgrade cibuildwheel to latest 3.2.1.
  • 729dd6c Update changelog.
  • c4da583 Allow TraceReturnCValue conversion to fail in non-monitoring (#7064)
  • dd66438 Fix tracing of empty return statements (GH-7067)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.1.4&new-version=3.1.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 73db32d0fff..b5cb9e7462d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -61,7 +61,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -cython==3.1.4 +cython==3.1.5 # via -r requirements/cython.in distlib==0.4.0 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 9f3e87f8ea7..7d87371c03d 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.1.4 +cython==3.1.5 # via -r requirements/cython.in multidict==6.7.0 # via -r requirements/multidict.in From d7c3e1f1ca308af0350a47753f43b11fec2de28d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Oct 2025 10:47:15 +0000 Subject: [PATCH 07/21] Bump sigstore/gh-action-sigstore-python from 3.0.1 to 3.1.0 (#11699) Bumps [sigstore/gh-action-sigstore-python](https://github.com/sigstore/gh-action-sigstore-python) from 3.0.1 to 3.1.0.
Release notes

Sourced from sigstore/gh-action-sigstore-python's releases.

v3.1.0

gh-action-sigstore-python is now compatible with Rekor v2 transparency log (but produced signature bundles still contain Rekor v1 entries by default).

Changed

  • The action now uses sigstore-python 4.1. All other dependencies are also updated (#220)

Fixed

  • Fixed incompatibility with Python 3.14 by upgrading dependencies (#225)

Added

  • rekor-version argument was added to control the Rekor transparency log version when signing. The default version in the gh-action-sigstore-python 3.x series will remain 1 (except when using staging: true). (#228)
Commits
  • f832326 Prepare 3.1.0 release (#230)
  • 3385d3a build(deps): bump astral-sh/setup-uv in the actions group (#232)
  • 35fff1e Add rekor-version argument (#228)
  • be60bbe build(deps): bump github/codeql-action in the actions group (#231)
  • 72e7431 Actually upgrade dependencies (#225)
  • ccdc279 ci, action: address zizmor findings, bump versions (#222)
  • 709f8a4 build(deps): bump sigstore from 3.6.3 to 4.0.0 (#220)
  • 5ce4031 requirements: Include main.in contents within dev.in (#221)
  • ea888ad build(deps): bump the actions group with 3 updates (#218)
  • 17565e2 build(deps): bump the python-dependencies group with 6 updates (#219)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=sigstore/gh-action-sigstore-python&package-manager=github_actions&previous-version=3.0.1&new-version=3.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 881bc61cf3f..e21cb46a4e5 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -489,7 +489,7 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v3.0.1 + uses: sigstore/gh-action-sigstore-python@v3.1.0 with: inputs: >- ./dist/*.tar.gz From 231a172f4d705db2e22dc01acab6377a4028c5c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Oct 2025 10:52:08 +0000 Subject: [PATCH 08/21] Bump regex from 2025.9.18 to 2025.10.23 (#11702) Bumps [regex](https://github.com/mrabarnett/mrab-regex) from 2025.9.18 to 2025.10.23.
Changelog

Sourced from regex's changelog.

Version: 2025.10.23

'setup.py' was missing from the source distribution.

Version: 2025.10.22

Fixed test in main.yml.

Version: 2025.10.21

Moved tests into subfolder.

Version: 2025.10.20

Re-organised files.

Updated to Unicode 17.0.0.

Version: 2025.9.20

Enable free-threading support in cibuildwheel in another place.

Version: 2025.9.19

Enable free-threading support in cibuildwheel.

Version: 2025.9.18

Git issue 565: Support the free-threaded build of CPython 3.13

Version: 2025.9.1

Git PR 585: Fix AttributeError: 'AnyAll' object has no attribute '_key'

Version: 2025.8.29

Git issue 584: AttributeError: 'AnyAll' object has no attribute 'positive'

Version: 2025.7.34

Git issue 575: Issues with ASCII/Unicode modifiers

Version: 2025.7.33

Updated main.yml and pyproject.toml.

Version: 2025.7.32

Git issue 580: Regression in v2025.7.31: \P{L} no longer matches in simple patterns

... (truncated)

Commits
  • 26454da 'setup.py' was missing from the source distribution.
  • f2d31b0 Fixed test in main.yml.
  • 2fd4dc0 Moved tests into subfolder.
  • 54fa96c Re-organised files.
  • 4359a6a Enable free-threading support in cibuildwheel in another place.
  • 2fe1139 Enable free-threading support in cibuildwheel.
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=regex&package-manager=pip&previous-version=2025.9.18&new-version=2025.10.23)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b5cb9e7462d..a37960e9a7a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -198,7 +198,7 @@ pyyaml==6.0.3 # via pre-commit re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.9.18 +regex==2025.10.23 # via re-assert requests==2.32.5 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index c93523e0b03..51654015cb7 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -193,7 +193,7 @@ pyyaml==6.0.3 # via pre-commit re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.9.18 +regex==2025.10.23 # via re-assert requests==2.32.5 # via sphinx diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 2eef6a22c0f..ba82d53bc8a 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -85,7 +85,7 @@ python-on-whales==0.78.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.9.18 +regex==2025.10.23 # via re-assert rich==14.2.0 # via pytest-codspeed diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index c99bea5ef63..ce643619379 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -120,7 +120,7 @@ python-on-whales==0.78.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.9.18 +regex==2025.10.23 # via re-assert rich==14.2.0 # via pytest-codspeed diff --git a/requirements/test.txt b/requirements/test.txt index a7a1104c557..836d88dac45 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -120,7 +120,7 @@ python-on-whales==0.78.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in -regex==2025.9.18 +regex==2025.10.23 # via re-assert rich==14.2.0 # via pytest-codspeed From 442d38fe641c47bf0d9d0ffe8a388b8f4453e1a8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 10:52:06 +0000 Subject: [PATCH 09/21] Bump cython from 3.1.5 to 3.1.6 (#11710) Bumps [cython](https://github.com/cython/cython) from 3.1.5 to 3.1.6.
Changelog

Sourced from cython's changelog.

3.1.6 (2025-10-23)

Bugs fixed

  • Unicode characters formatted from C integers with f"{value:c}" could result in invalid Python string objects since Cython 3.1.0. (GitHub issue #7240)

  • cythonize (program and function) now uses concurrent.futures.ProcessPoolExecutor instead of multiprocessing.Pool to fix a hang on build failures in parallel builds. A possible work-around is to disable parallel builds (see the sketch after this list). Patch by Sviatoslav Sydorenko. (GitHub issue #7183)
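
A minimal sketch of the work-around mentioned above, assuming Cython 3.1.6 and a local module.pyx; nthreads=0 keeps cythonization serial:

```python
# A minimal sketch, assuming Cython 3.1.6 and a local module.pyx.
# nthreads=0 disables parallel cythonization, the work-around noted above.
from Cython.Build import cythonize

extensions = cythonize("module.pyx", nthreads=0)  # serial build
```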

Commits
  • b40c676 Prepare release of 3.1.6.
  • 2c00332 Build: Clear "license-files" option in setup metadata due to
  • 6d0b9bd Build: Use twine to check the wheels before uploading them.
  • fecd0e5 Silence a C coercion warning in a test.
  • 2a68bbc Avoid back-and-forth between signed and unsigned index types in C array conve...
  • 1824975 Silence a C compiler warning about implicit signed/unsigned conversion.
  • a583bc3 Disable parallelism in cythonize command/function on spawn (GH-7183)
  • 2ee0bc1 Update changelog.
  • b0f7b40 Fix f-string joining when non-ASCII "{intvalue:c}" character formats are invo...
  • 67351ba Adapt release date of 3.1.5 to PyPI release time.
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cython&package-manager=pip&previous-version=3.1.5&new-version=3.1.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a37960e9a7a..8faa9a75f0d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -61,7 +61,7 @@ coverage==7.11.0 # pytest-cov cryptography==46.0.3 # via trustme -cython==3.1.5 +cython==3.1.6 # via -r requirements/cython.in distlib==0.4.0 # via virtualenv diff --git a/requirements/cython.txt b/requirements/cython.txt index 7d87371c03d..05f39110b33 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/cython.txt --resolver=backtracking --strip-extras requirements/cython.in # -cython==3.1.5 +cython==3.1.6 # via -r requirements/cython.in multidict==6.7.0 # via -r requirements/multidict.in From 1d6513a3acbb84fa0c463162109abf9a8d750ced Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 10:54:10 +0000 Subject: [PATCH 10/21] Bump pytest-codspeed from 4.1.1 to 4.2.0 (#11711) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 4.1.1 to 4.2.0.
Release notes

Sourced from pytest-codspeed's releases.

v4.2.0

What's Changed

Full Changelog: https://github.com/CodSpeedHQ/pytest-codspeed/compare/v4.1.1...v4.2.0

Changelog

Sourced from pytest-codspeed's changelog.

[4.2.0] - 2025-10-24

🚀 Features

🐛 Bug Fixes

  • Throw error when instrument hooks failed to initialize under codspeed env by @adriencaccia

⚙️ Internals

Commits
  • 3b0649b Release v4.2.0 🚀
  • 1af4399 feat: support python 3.14
  • 2e8a7a8 fix(valgrind): throw error when instrument hooks failed to initialize under c...
  • bf30b24 chore: bump mypy
  • f1e42e4 chore(ci): bump python to 3.14 in the ci
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=4.1.1&new-version=4.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8faa9a75f0d..bc75c0e3f99 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -176,7 +176,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 51654015cb7..9f867e55cf0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -171,7 +171,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 1a34a990539..ccb4e1a3d73 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -80,7 +80,7 @@ pytest==8.4.2 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via -r requirements/lint.in pytest-mock==3.15.1 # via -r requirements/lint.in diff --git a/requirements/test-common.txt b/requirements/test-common.txt index ba82d53bc8a..6fd6a5395cc 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -71,7 +71,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index ce643619379..fa97a4464d2 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -106,7 +106,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 836d88dac45..550cc05609b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -106,7 +106,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.1.1 +pytest-codspeed==4.2.0 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in From 322d177f358deb40ce8f8c2de84426d27296616f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Oct 2025 11:03:29 +0000 Subject: [PATCH 11/21] Bump python-on-whales from 0.78.0 to 0.79.0 (#11712) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [python-on-whales](https://github.com/gabrieldemarmiesse/python-on-whales) from 0.78.0 to 0.79.0.
Release notes

Sourced from python-on-whales's releases.

v0.79.0

What's Changed

New Contributors

Thank you for your contributions! This makes me very happy to see new faces!

Full Changelog: https://github.com/gabrieldemarmiesse/python-on-whales/compare/v0.78.0...v0.79.0

Commits
  • 4978524 Bump python-on-whales version to 0.79.0
  • d39aaee Add metadata_file opt-in for buildx builds (#689)
  • b694f85 Improve typing of run by using @overload (#685)
  • 488ab4a [Docs] Enable Syntax Highlighting (#686)
  • 88eae67 feat : Add support for Docker service networks in ComposeService (#683)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=python-on-whales&package-manager=pip&previous-version=0.78.0&new-version=0.79.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index bc75c0e3f99..7f86fbe5266 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -190,7 +190,7 @@ pytest-xdist==3.8.0 # via -r requirements/test-common.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 9f867e55cf0..772bd1154e4 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -185,7 +185,7 @@ pytest-xdist==3.8.0 # via -r requirements/test-common.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index ccb4e1a3d73..222601fadb4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -86,7 +86,7 @@ pytest-mock==3.15.1 # via -r requirements/lint.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via -r requirements/lint.in pyyaml==6.0.3 # via pre-commit diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 6fd6a5395cc..7dbad4a84aa 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -81,7 +81,7 @@ pytest-xdist==3.8.0 # via -r requirements/test-common.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index fa97a4464d2..b85c0acc12c 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -116,7 +116,7 @@ pytest-xdist==3.8.0 # via -r requirements/test-common.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 550cc05609b..02fe687bca1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -116,7 +116,7 @@ pytest-xdist==3.8.0 # via -r requirements/test-common.in python-dateutil==2.9.0.post0 # via freezegun -python-on-whales==0.78.0 +python-on-whales==0.79.0 # via -r requirements/test-common.in re-assert==1.1.0 # via -r requirements/test-common.in From b4d9b89f23742ed7a240586cbf8a59b7bfd13c9c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 12:14:07 +0000 Subject: [PATCH 12/21] Bump actions/download-artifact from 5 to 6 (#11721) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 5 to 6.
Release notes

Sourced from actions/download-artifact's releases.

v6.0.0

What's Changed

BREAKING CHANGE: this update supports Node v24.x. This is not a breaking change per se, but we're treating it as such.

New Contributors

Full Changelog: https://github.com/actions/download-artifact/compare/v5...v6.0.0

Commits
  • 018cc2c Merge pull request #438 from actions/danwkennedy/prepare-6.0.0
  • 815651c Revert "Remove github.dep.yml"
  • bb3a066 Remove github.dep.yml
  • fa1ce46 Prepare v6.0.0
  • 4a24838 Merge pull request #431 from danwkennedy/patch-1
  • 5e3251c Readme: spell out the first use of GHES
  • abefc31 Merge pull request #424 from actions/yacaovsnc/update_readme
  • ac43a60 Update README with artifact extraction details
  • de96f46 Merge pull request #417 from actions/yacaovsnc/update_readme
  • 7993cb4 Remove migration guide for artifact download changes
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/download-artifact&package-manager=github_actions&previous-version=5&new-version=6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index e21cb46a4e5..83003b1eaa8 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -179,7 +179,7 @@ jobs: run: echo "PYTHON_GIL=0" >> $GITHUB_ENV - name: Restore llhttp generated files if: ${{ matrix.no-extensions == '' }} - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: llhttp path: vendor/llhttp/build/ @@ -266,7 +266,7 @@ jobs: run: | python -m pip install -r requirements/test.in -c requirements/test.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: llhttp path: vendor/llhttp/build/ @@ -328,7 +328,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: llhttp path: vendor/llhttp/build/ @@ -418,7 +418,7 @@ jobs: python -m pip install -r requirements/cython.in -c requirements/cython.txt - name: Restore llhttp generated files - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: llhttp path: vendor/llhttp/build/ @@ -463,7 +463,7 @@ jobs: run: | echo "${{ secrets.GITHUB_TOKEN }}" | gh auth login --with-token - name: Download distributions - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: path: dist pattern: dist-* From 6d22d3a46ec18c846b77a536956dcae9eba50d7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 12:21:35 +0000 Subject: [PATCH 13/21] Bump pip from 25.2 to 25.3 (#11722) Bumps [pip](https://github.com/pypa/pip) from 25.2 to 25.3.
Changelog

Sourced from pip's changelog.

25.3 (2025-10-24)

Deprecations and Removals

  • Remove support for the legacy setup.py develop editable method in setuptools editable installs; setuptools >= 64 is now required. ([#11457](https://github.com/pypa/pip/issues/11457))

  • Remove the deprecated --global-option and --build-option. --config-setting is now the only way to pass options to the build backend. ([#11859](https://github.com/pypa/pip/issues/11859))

  • Deprecate the PIP_CONSTRAINT environment variable for specifying build constraints.

    Use the --build-constraint option or the PIP_BUILD_CONSTRAINT environment variable instead. When build constraints are used, PIP_CONSTRAINT no longer affects isolated build environments. To enable this behavior without specifying any build constraints, use --use-feature=build-constraint. ([#13534](https://github.com/pypa/pip/issues/13534))

  • Remove support for non-standard legacy wheel filenames. ([#13581](https://github.com/pypa/pip/issues/13581))

  • Remove support for the deprecated setup.py bdist_wheel mechanism. Consequently, --use-pep517 is now always on, and --no-use-pep517 has been removed. ([#6334](https://github.com/pypa/pip/issues/6334))

Features

  • When PEP 658 metadata is available, full distribution files are no longer downloaded when using pip lock or pip install --dry-run. ([#12603](https://github.com/pypa/pip/issues/12603))
  • Add support for installing an editable requirement written as a Direct URL (PackageName @ URL). ([#13495](https://github.com/pypa/pip/issues/13495))
  • Add support for build constraints via the --build-constraint option. This allows constraining the versions of packages used during the build process (e.g., setuptools) without affecting the final installation; see the sketch after this list. ([#13534](https://github.com/pypa/pip/issues/13534))
  • On ResolutionImpossible errors, include a note about causes with no candidates. ([#13588](https://github.com/pypa/pip/issues/13588))
  • Building pip itself from source now uses flit-core instead of setuptools. This does not affect how pip installs or builds packages you use. ([#13473](https://github.com/pypa/pip/issues/13473))
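
A minimal sketch of the two install flows above, assuming pip 25.3; the package path, constraints file, and Git URL are illustrative placeholders, and pip is driven through subprocess since it exposes no supported Python API:

```python
# A minimal sketch, assuming pip 25.3. The package path, constraints file,
# and Git URL below are illustrative placeholders.
import subprocess
import sys

# Constrain build-time dependencies (e.g. setuptools) without affecting the
# final installation:
subprocess.run(
    [sys.executable, "-m", "pip", "install",
     "--build-constraint", "build-constraints.txt", "./mypkg"],
    check=True,
)

# Editable install written as a Direct URL (PackageName @ URL):
subprocess.run(
    [sys.executable, "-m", "pip", "install",
     "-e", "mypkg @ git+https://github.com/example/mypkg"],
    check=True,
)
```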

Bug Fixes

  • Handle malformed Version metadata entries and show a sensible error message instead of crashing. ([#13443](https://github.com/pypa/pip/issues/13443))
  • Permit spaces between a filepath and extras in an install requirement. ([#13523](https://github.com/pypa/pip/issues/13523))
  • Ensure the self-check files in the cache have the same permissions as the rest of the cache. ([#13528](https://github.com/pypa/pip/issues/13528))
  • Avoid concurrency issues and improve performance when caching locally built wheels, especially when the temporary build directory is on a different filesystem than the cache. The wheel directory passed to the build backend is now a temporary subdirectory inside the cache directory. ([#13540](https://github.com/pypa/pip/issues/13540))
  • Include relevant user-supplied constraints in logs when reporting dependency conflicts. ([#13545](https://github.com/pypa/pip/issues/13545))
  • Fix a regression in configuration parsing that was turning a single value into a list and thus leading to a validation error. ([#13548](https://github.com/pypa/pip/issues/13548))
  • For Python versions that do not support PEP 706, pip will now raise an installation error for a source distribution when it includes a symlink that points outside the source distribution archive. ([#13550](https://github.com/pypa/pip/issues/13550))
  • Prevent --user installs if site.ENABLE_USER_SITE is set to False. ([#8794](https://github.com/pypa/pip/issues/8794))

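For the filepath-and-extras fix above, a small sketch (./mypkg and the extra name fast are placeholders) of a spelling that previously failed to parse:

    import subprocess
    import sys

    # A space between the local path and the extras bracket is now
    # accepted when pip parses the requirement.
    subprocess.run(
        [sys.executable, "-m", "pip", "install", "./mypkg [fast]"],
        check=True,
    )
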
... (truncated)

Commits
  • a520693 Bump for release
  • 0f2973e Fix up authors by adding entry to .mailmap
  • 87828dc Update AUTHORS.txt
  • ce6a38c Merge pull request #13628 from sbidoul/imp-doc-pep517-sbi
  • ee16c81 Merge pull request #13629 from notatallshaw/bump-gone_in="25.3"
  • 3e227aa Bump gone_in="25.3"
  • 4ad1828 Merge pull request #13495 from ichard26/feat/direct-editables
  • 66ded3b Merge pull request #13570 from ShubhamNagure/fix-constraint-reporting-13545
  • 67e8ac2 Merge pull request #13588 from notatallshaw/hint-on-resolution-impossible-whe...
  • 990ca8a Merge pull request #8796 from pelson/honour_user_site
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pip&package-manager=pip&previous-version=25.2&new-version=25.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 7f86fbe5266..73197640bfe 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -289,7 +289,7 @@ zlib-ng==1.0.0 # -r requirements/test-common.in # The following packages are considered to be unsafe in a requirements file: -pip==25.2 +pip==25.3 # via pip-tools setuptools==80.9.0 # via pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 772bd1154e4..89cca196547 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -279,7 +279,7 @@ zlib-ng==1.0.0 # -r requirements/test-common.in # The following packages are considered to be unsafe in a requirements file: -pip==25.2 +pip==25.3 # via pip-tools setuptools==80.9.0 # via pip-tools From 57ad7fa3113ffb1ca7017da36587b667ec0f5d3d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 16:03:28 +0000 Subject: [PATCH 14/21] [PR #11714/0d77d0d6 backport][3.14] Fix loading netrc when NETRC env var is not set (#11728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: J. Nick Koston Co-authored-by: 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> fixes #11713 --- CHANGES/11713.bugfix.rst | 1 + CHANGES/11714.bugfix.rst | 1 + aiohttp/client.py | 9 +-------- tests/conftest.py | 18 ++++++++++++++++++ tests/test_client_functional.py | 18 ++++++++++++++++-- tests/test_client_session.py | 15 +++++++++++++-- 6 files changed, 50 insertions(+), 12 deletions(-) create mode 100644 CHANGES/11713.bugfix.rst create mode 120000 CHANGES/11714.bugfix.rst diff --git a/CHANGES/11713.bugfix.rst b/CHANGES/11713.bugfix.rst new file mode 100644 index 00000000000..dbb45a5254f --- /dev/null +++ b/CHANGES/11713.bugfix.rst @@ -0,0 +1 @@ +Fixed loading netrc credentials from the default :file:`~/.netrc` (:file:`~/_netrc` on Windows) location when the :envvar:`NETRC` environment variable is not set -- by :user:`bdraco`. diff --git a/CHANGES/11714.bugfix.rst b/CHANGES/11714.bugfix.rst new file mode 120000 index 00000000000..5a506f1ded3 --- /dev/null +++ b/CHANGES/11714.bugfix.rst @@ -0,0 +1 @@ +11713.bugfix.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index b99f834d0bc..8d2c3d67921 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -645,14 +645,7 @@ async def _request( auth = self._default_auth # Try netrc if auth is still None and trust_env is enabled. - # Only check if NETRC environment variable is set to avoid - # creating an expensive executor job unnecessarily. 
- if ( - auth is None - and self._trust_env - and url.host is not None - and os.environ.get("NETRC") - ): + if auth is None and self._trust_env and url.host is not None: auth = await self._loop.run_in_executor( None, self._get_netrc_auth, url.host ) diff --git a/tests/conftest.py b/tests/conftest.py index bde9500f129..1a7be393358 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ import asyncio import base64 import os +import platform import socket import ssl import sys @@ -309,6 +310,23 @@ def netrc_other_host(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: return netrc_file +@pytest.fixture +def netrc_home_directory(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a netrc file in a mocked home directory without setting NETRC env var.""" + home_dir = tmp_path / "home" + home_dir.mkdir() + netrc_filename = "_netrc" if platform.system() == "Windows" else ".netrc" + netrc_file = home_dir / netrc_filename + netrc_file.write_text("default login netrc_user password netrc_pass\n") + + home_env_var = "USERPROFILE" if platform.system() == "Windows" else "HOME" + monkeypatch.setenv(home_env_var, str(home_dir)) + # Ensure NETRC env var is not set + monkeypatch.delenv("NETRC", raising=False) + + return netrc_file + + @pytest.fixture def start_connection() -> Iterator[mock.Mock]: with mock.patch( diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 7ff53719146..4cf18b9e5ed 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3777,12 +3777,12 @@ async def test_netrc_auth_from_env( # type: ignore[misc] @pytest.mark.usefixtures("no_netrc") -async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] +async def test_netrc_auth_skipped_without_netrc_file( # type: ignore[misc] headers_echo_client: Callable[ ..., Awaitable[TestClient[web.Request, web.Application]] ], ) -> None: - """Test that netrc authentication is skipped when NETRC env var is not set.""" + """Test that netrc authentication is skipped when no netrc file exists.""" client = await headers_echo_client(trust_env=True) async with client.get("/") as r: assert r.status == 200 @@ -3791,6 +3791,20 @@ async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] assert "Authorization" not in content["headers"] +@pytest.mark.usefixtures("netrc_home_directory") +async def test_netrc_auth_from_home_directory( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication works from default ~/.netrc without NETRC env var.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + assert content["headers"]["Authorization"] == "Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + @pytest.mark.usefixtures("netrc_default_contents") async def test_netrc_auth_overridden_by_explicit_auth( # type: ignore[misc] headers_echo_client: Callable[ diff --git a/tests/test_client_session.py b/tests/test_client_session.py index ade8a67b7ca..7ab98c2bee4 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -1366,8 +1366,8 @@ async def test_netrc_auth_skipped_without_trust_env(auth_server: TestServer) -> @pytest.mark.usefixtures("no_netrc") -async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> None: - """Test that netrc authentication is skipped when NETRC env var is not set.""" +async def 
test_netrc_auth_skipped_without_netrc_file(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when no netrc file exists.""" async with ( ClientSession(trust_env=True) as session, session.get(auth_server.make_url("/")) as resp, @@ -1376,6 +1376,17 @@ async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> assert text == "no_auth" +@pytest.mark.usefixtures("netrc_home_directory") +async def test_netrc_auth_from_home_directory(auth_server: TestServer) -> None: + """Test that netrc authentication works from default ~/.netrc location without NETRC env var.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + assert text == "auth:Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + @pytest.mark.usefixtures("netrc_default_contents") async def test_netrc_auth_overridden_by_explicit_auth(auth_server: TestServer) -> None: """Test that explicit auth parameter overrides netrc authentication.""" From b734e0434731e11360fec74031f3deed02ee4717 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 16:03:34 +0000 Subject: [PATCH 15/21] [PR #11714/0d77d0d6 backport][3.13] Fix loading netrc when NETRC env var is not set (#11727) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: J. Nick Koston Co-authored-by: 🇺🇦 Sviatoslav Sydorenko (Святослав Сидоренко) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> fixes #11713 --- CHANGES/11713.bugfix.rst | 1 + CHANGES/11714.bugfix.rst | 1 + aiohttp/client.py | 9 +-------- tests/conftest.py | 18 ++++++++++++++++++ tests/test_client_functional.py | 18 ++++++++++++++++-- tests/test_client_session.py | 15 +++++++++++++-- 6 files changed, 50 insertions(+), 12 deletions(-) create mode 100644 CHANGES/11713.bugfix.rst create mode 120000 CHANGES/11714.bugfix.rst diff --git a/CHANGES/11713.bugfix.rst b/CHANGES/11713.bugfix.rst new file mode 100644 index 00000000000..dbb45a5254f --- /dev/null +++ b/CHANGES/11713.bugfix.rst @@ -0,0 +1 @@ +Fixed loading netrc credentials from the default :file:`~/.netrc` (:file:`~/_netrc` on Windows) location when the :envvar:`NETRC` environment variable is not set -- by :user:`bdraco`. diff --git a/CHANGES/11714.bugfix.rst b/CHANGES/11714.bugfix.rst new file mode 120000 index 00000000000..5a506f1ded3 --- /dev/null +++ b/CHANGES/11714.bugfix.rst @@ -0,0 +1 @@ +11713.bugfix.rst \ No newline at end of file diff --git a/aiohttp/client.py b/aiohttp/client.py index fb8287fc23c..bc4ee17caf0 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -661,14 +661,7 @@ async def _request( auth = self._default_auth # Try netrc if auth is still None and trust_env is enabled. - # Only check if NETRC environment variable is set to avoid - # creating an expensive executor job unnecessarily. 
- if ( - auth is None - and self._trust_env - and url.host is not None - and os.environ.get("NETRC") - ): + if auth is None and self._trust_env and url.host is not None: auth = await self._loop.run_in_executor( None, self._get_netrc_auth, url.host ) diff --git a/tests/conftest.py b/tests/conftest.py index 62efff55f86..a4dba76efa6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ import asyncio import base64 import os +import platform import socket import ssl import sys @@ -308,6 +309,23 @@ def netrc_other_host(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: return netrc_file +@pytest.fixture +def netrc_home_directory(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a netrc file in a mocked home directory without setting NETRC env var.""" + home_dir = tmp_path / "home" + home_dir.mkdir() + netrc_filename = "_netrc" if platform.system() == "Windows" else ".netrc" + netrc_file = home_dir / netrc_filename + netrc_file.write_text("default login netrc_user password netrc_pass\n") + + home_env_var = "USERPROFILE" if platform.system() == "Windows" else "HOME" + monkeypatch.setenv(home_env_var, str(home_dir)) + # Ensure NETRC env var is not set + monkeypatch.delenv("NETRC", raising=False) + + return netrc_file + + @pytest.fixture def start_connection() -> Iterator[mock.Mock]: with mock.patch( diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 92ba1cf5204..34cc69f88a7 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -3785,12 +3785,12 @@ async def test_netrc_auth_from_env( # type: ignore[misc] @pytest.mark.usefixtures("no_netrc") -async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] +async def test_netrc_auth_skipped_without_netrc_file( # type: ignore[misc] headers_echo_client: Callable[ ..., Awaitable[TestClient[web.Request, web.Application]] ], ) -> None: - """Test that netrc authentication is skipped when NETRC env var is not set.""" + """Test that netrc authentication is skipped when no netrc file exists.""" client = await headers_echo_client(trust_env=True) async with client.get("/") as r: assert r.status == 200 @@ -3799,6 +3799,20 @@ async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] assert "Authorization" not in content["headers"] +@pytest.mark.usefixtures("netrc_home_directory") +async def test_netrc_auth_from_home_directory( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication works from default ~/.netrc without NETRC env var.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + assert content["headers"]["Authorization"] == "Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + @pytest.mark.usefixtures("netrc_default_contents") async def test_netrc_auth_overridden_by_explicit_auth( # type: ignore[misc] headers_echo_client: Callable[ diff --git a/tests/test_client_session.py b/tests/test_client_session.py index 8b148e742c5..5d017c8d0ba 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -1365,8 +1365,8 @@ async def test_netrc_auth_skipped_without_trust_env(auth_server: TestServer) -> @pytest.mark.usefixtures("no_netrc") -async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> None: - """Test that netrc authentication is skipped when NETRC env var is not set.""" +async def 
test_netrc_auth_skipped_without_netrc_file(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when no netrc file exists.""" async with ( ClientSession(trust_env=True) as session, session.get(auth_server.make_url("/")) as resp, @@ -1375,6 +1375,17 @@ async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> assert text == "no_auth" +@pytest.mark.usefixtures("netrc_home_directory") +async def test_netrc_auth_from_home_directory(auth_server: TestServer) -> None: + """Test that netrc authentication works from default ~/.netrc location without NETRC env var.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + assert text == "auth:Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + @pytest.mark.usefixtures("netrc_default_contents") async def test_netrc_auth_overridden_by_explicit_auth(auth_server: TestServer) -> None: """Test that explicit auth parameter overrides netrc authentication.""" From 990c6b4ceb327d981fe766f039340f7e1c526522 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 12:09:45 -0500 Subject: [PATCH 16/21] [PR #11724/82ce525b backport][3.14] Ensure cookies are still parsed after a malformed cookie (#11730) --- CHANGES/11632.bugfix.rst | 1 + aiohttp/_cookie_helpers.py | 35 ++++++++- docs/spelling_wordlist.txt | 1 + tests/test_cookie_helpers.py | 137 ++++++++++++++++++++++++++++++++++- 4 files changed, 170 insertions(+), 4 deletions(-) create mode 100644 CHANGES/11632.bugfix.rst diff --git a/CHANGES/11632.bugfix.rst b/CHANGES/11632.bugfix.rst new file mode 100644 index 00000000000..c07bfb2b1f7 --- /dev/null +++ b/CHANGES/11632.bugfix.rst @@ -0,0 +1 @@ +Fixed cookie parser to continue parsing subsequent cookies when encountering a malformed cookie that fails regex validation, such as Google's ``g_state`` cookie with unescaped quotes -- by :user:`bdraco`. diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 7fe8f43d12b..6628a3c5d0c 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -166,7 +166,10 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: attribute names (like 'path' or 'secure') should be treated as cookies. This parser uses the same regex-based approach as parse_set_cookie_headers - to properly handle quoted values that may contain semicolons. + to properly handle quoted values that may contain semicolons. 
When the + regex fails to match a malformed cookie, it falls back to simple parsing + to ensure subsequent cookies are not lost + https://github.com/aio-libs/aiohttp/issues/11632 Args: header: The Cookie header value to parse @@ -178,6 +181,7 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: return [] cookies: list[tuple[str, Morsel[str]]] = [] + morsel: Morsel[str] i = 0 n = len(header) @@ -185,7 +189,32 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: # Use the same pattern as parse_set_cookie_headers to find cookies match = _COOKIE_PATTERN.match(header, i) if not match: - break + # Fallback for malformed cookies https://github.com/aio-libs/aiohttp/issues/11632 + # Find next semicolon to skip or attempt simple key=value parsing + next_semi = header.find(";", i) + eq_pos = header.find("=", i) + + # Try to extract key=value if '=' comes before ';' + if eq_pos != -1 and (next_semi == -1 or eq_pos < next_semi): + end_pos = next_semi if next_semi != -1 else n + key = header[i:eq_pos].strip() + value = header[eq_pos + 1 : end_pos].strip() + + # Validate the name (same as regex path) + if not _COOKIE_NAME_RE.match(key): + internal_logger.warning( + "Can not load cookie: Illegal cookie name %r", key + ) + else: + morsel = Morsel() + morsel.__setstate__( # type: ignore[attr-defined] + {"key": key, "value": _unquote(value), "coded_value": value} + ) + cookies.append((key, morsel)) + + # Move to next cookie or end + i = next_semi + 1 if next_semi != -1 else n + continue key = match.group("key") value = match.group("val") or "" @@ -197,7 +226,7 @@ def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: continue # Create new morsel - morsel: Morsel[str] = Morsel() + morsel = Morsel() # Preserve the original value as coded_value (with quotes if present) # We use __setstate__ instead of the public set() API because it allows us to # bypass validation and set already validated state. 
This is more stable than diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index ec31a3c8806..add105719be 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -343,6 +343,7 @@ un unawaited unclosed undercounting +unescaped unhandled unicode unittest diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 575bbe54d01..577e3156560 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1137,7 +1137,6 @@ def test_parse_cookie_header_empty() -> None: assert parse_cookie_header(" ") == [] -@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11632") def test_parse_cookie_gstate_header() -> None: header = ( "_ga=ga; " @@ -1444,6 +1443,142 @@ def test_parse_cookie_header_illegal_names(caplog: pytest.LogCaptureFixture) -> assert "Can not load cookie: Illegal cookie name 'invalid,cookie'" in caplog.text +def test_parse_cookie_header_large_value() -> None: + """Test that large cookie values don't cause DoS.""" + large_value = "A" * 8192 + header = f"normal=value; large={large_value}; after=cookie" + + result = parse_cookie_header(header) + cookie_names = [name for name, _ in result] + + assert len(result) == 3 + assert "normal" in cookie_names + assert "large" in cookie_names + assert "after" in cookie_names + + large_cookie = next(morsel for name, morsel in result if name == "large") + assert len(large_cookie.value) == 8192 + + +def test_parse_cookie_header_multiple_equals() -> None: + """Test handling of multiple equals signs in cookie values.""" + header = "session=abc123; data=key1=val1&key2=val2; token=xyz" + + result = parse_cookie_header(header) + + assert len(result) == 3 + + name1, morsel1 = result[0] + assert name1 == "session" + assert morsel1.value == "abc123" + + name2, morsel2 = result[1] + assert name2 == "data" + assert morsel2.value == "key1=val1&key2=val2" + + name3, morsel3 = result[2] + assert name3 == "token" + assert morsel3.value == "xyz" + + +def test_parse_cookie_header_fallback_preserves_subsequent_cookies() -> None: + """Test that fallback parser doesn't lose subsequent cookies.""" + header = 'normal=value; malformed={"json":"value"}; after1=cookie1; after2=cookie2' + + result = parse_cookie_header(header) + cookie_names = [name for name, _ in result] + + assert len(result) == 4 + assert cookie_names == ["normal", "malformed", "after1", "after2"] + + name1, morsel1 = result[0] + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert morsel2.value == '{"json":"value"}' + + name3, morsel3 = result[2] + assert morsel3.value == "cookie1" + + name4, morsel4 = result[3] + assert morsel4.value == "cookie2" + + +def test_parse_cookie_header_whitespace_in_fallback() -> None: + """Test that fallback parser handles whitespace correctly.""" + header = "a=1; b = 2 ; c= 3; d =4" + + result = parse_cookie_header(header) + + assert len(result) == 4 + for name, morsel in result: + assert name in ("a", "b", "c", "d") + assert morsel.value in ("1", "2", "3", "4") + + +def test_parse_cookie_header_empty_value_in_fallback() -> None: + """Test that fallback handles empty values correctly.""" + header = "normal=value; empty=; another=test" + + result = parse_cookie_header(header) + + assert len(result) == 3 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "empty" + assert morsel2.value == "" + + name3, morsel3 = result[2] + assert name3 == "another" + assert 
morsel3.value == "test" + + +def test_parse_cookie_header_invalid_name_in_fallback( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that fallback parser rejects cookies with invalid names.""" + header = 'normal=value; invalid,name={"x":"y"}; another=test' + + result = parse_cookie_header(header) + + assert len(result) == 2 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "another" + assert morsel2.value == "test" + + assert "Can not load cookie: Illegal cookie name 'invalid,name'" in caplog.text + + +def test_parse_cookie_header_empty_key_in_fallback( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that fallback parser logs warning for empty cookie names.""" + header = 'normal=value; ={"malformed":"json"}; another=test' + + result = parse_cookie_header(header) + + assert len(result) == 2 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "another" + assert morsel2.value == "test" + + assert "Can not load cookie: Illegal cookie name ''" in caplog.text + + @pytest.mark.parametrize( ("input_str", "expected"), [ From 95daf0c88c0124cfe87000050efe81be97516d51 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 12:09:55 -0500 Subject: [PATCH 17/21] [PR #11724/82ce525b backport][3.13] Ensure cookies are still parsed after a malformed cookie (#11729) --- CHANGES/11632.bugfix.rst | 1 + aiohttp/_cookie_helpers.py | 35 ++++++++- docs/spelling_wordlist.txt | 1 + tests/test_cookie_helpers.py | 137 ++++++++++++++++++++++++++++++++++- 4 files changed, 170 insertions(+), 4 deletions(-) create mode 100644 CHANGES/11632.bugfix.rst diff --git a/CHANGES/11632.bugfix.rst b/CHANGES/11632.bugfix.rst new file mode 100644 index 00000000000..c07bfb2b1f7 --- /dev/null +++ b/CHANGES/11632.bugfix.rst @@ -0,0 +1 @@ +Fixed cookie parser to continue parsing subsequent cookies when encountering a malformed cookie that fails regex validation, such as Google's ``g_state`` cookie with unescaped quotes -- by :user:`bdraco`. diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 9e80b6065d7..837893e5626 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -165,7 +165,10 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: attribute names (like 'path' or 'secure') should be treated as cookies. This parser uses the same regex-based approach as parse_set_cookie_headers - to properly handle quoted values that may contain semicolons. + to properly handle quoted values that may contain semicolons. 
When the + regex fails to match a malformed cookie, it falls back to simple parsing + to ensure subsequent cookies are not lost + https://github.com/aio-libs/aiohttp/issues/11632 Args: header: The Cookie header value to parse @@ -177,6 +180,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: return [] cookies: List[Tuple[str, Morsel[str]]] = [] + morsel: Morsel[str] i = 0 n = len(header) @@ -184,7 +188,32 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: # Use the same pattern as parse_set_cookie_headers to find cookies match = _COOKIE_PATTERN.match(header, i) if not match: - break + # Fallback for malformed cookies https://github.com/aio-libs/aiohttp/issues/11632 + # Find next semicolon to skip or attempt simple key=value parsing + next_semi = header.find(";", i) + eq_pos = header.find("=", i) + + # Try to extract key=value if '=' comes before ';' + if eq_pos != -1 and (next_semi == -1 or eq_pos < next_semi): + end_pos = next_semi if next_semi != -1 else n + key = header[i:eq_pos].strip() + value = header[eq_pos + 1 : end_pos].strip() + + # Validate the name (same as regex path) + if not _COOKIE_NAME_RE.match(key): + internal_logger.warning( + "Can not load cookie: Illegal cookie name %r", key + ) + else: + morsel = Morsel() + morsel.__setstate__( # type: ignore[attr-defined] + {"key": key, "value": _unquote(value), "coded_value": value} + ) + cookies.append((key, morsel)) + + # Move to next cookie or end + i = next_semi + 1 if next_semi != -1 else n + continue key = match.group("key") value = match.group("val") or "" @@ -196,7 +225,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: continue # Create new morsel - morsel: Morsel[str] = Morsel() + morsel = Morsel() # Preserve the original value as coded_value (with quotes if present) # We use __setstate__ instead of the public set() API because it allows us to # bypass validation and set already validated state. 
This is more stable than diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index ec31a3c8806..add105719be 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -343,6 +343,7 @@ un unawaited unclosed undercounting +unescaped unhandled unicode unittest diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 575bbe54d01..577e3156560 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1137,7 +1137,6 @@ def test_parse_cookie_header_empty() -> None: assert parse_cookie_header(" ") == [] -@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11632") def test_parse_cookie_gstate_header() -> None: header = ( "_ga=ga; " @@ -1444,6 +1443,142 @@ def test_parse_cookie_header_illegal_names(caplog: pytest.LogCaptureFixture) -> assert "Can not load cookie: Illegal cookie name 'invalid,cookie'" in caplog.text +def test_parse_cookie_header_large_value() -> None: + """Test that large cookie values don't cause DoS.""" + large_value = "A" * 8192 + header = f"normal=value; large={large_value}; after=cookie" + + result = parse_cookie_header(header) + cookie_names = [name for name, _ in result] + + assert len(result) == 3 + assert "normal" in cookie_names + assert "large" in cookie_names + assert "after" in cookie_names + + large_cookie = next(morsel for name, morsel in result if name == "large") + assert len(large_cookie.value) == 8192 + + +def test_parse_cookie_header_multiple_equals() -> None: + """Test handling of multiple equals signs in cookie values.""" + header = "session=abc123; data=key1=val1&key2=val2; token=xyz" + + result = parse_cookie_header(header) + + assert len(result) == 3 + + name1, morsel1 = result[0] + assert name1 == "session" + assert morsel1.value == "abc123" + + name2, morsel2 = result[1] + assert name2 == "data" + assert morsel2.value == "key1=val1&key2=val2" + + name3, morsel3 = result[2] + assert name3 == "token" + assert morsel3.value == "xyz" + + +def test_parse_cookie_header_fallback_preserves_subsequent_cookies() -> None: + """Test that fallback parser doesn't lose subsequent cookies.""" + header = 'normal=value; malformed={"json":"value"}; after1=cookie1; after2=cookie2' + + result = parse_cookie_header(header) + cookie_names = [name for name, _ in result] + + assert len(result) == 4 + assert cookie_names == ["normal", "malformed", "after1", "after2"] + + name1, morsel1 = result[0] + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert morsel2.value == '{"json":"value"}' + + name3, morsel3 = result[2] + assert morsel3.value == "cookie1" + + name4, morsel4 = result[3] + assert morsel4.value == "cookie2" + + +def test_parse_cookie_header_whitespace_in_fallback() -> None: + """Test that fallback parser handles whitespace correctly.""" + header = "a=1; b = 2 ; c= 3; d =4" + + result = parse_cookie_header(header) + + assert len(result) == 4 + for name, morsel in result: + assert name in ("a", "b", "c", "d") + assert morsel.value in ("1", "2", "3", "4") + + +def test_parse_cookie_header_empty_value_in_fallback() -> None: + """Test that fallback handles empty values correctly.""" + header = "normal=value; empty=; another=test" + + result = parse_cookie_header(header) + + assert len(result) == 3 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "empty" + assert morsel2.value == "" + + name3, morsel3 = result[2] + assert name3 == "another" + assert 
morsel3.value == "test" + + +def test_parse_cookie_header_invalid_name_in_fallback( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that fallback parser rejects cookies with invalid names.""" + header = 'normal=value; invalid,name={"x":"y"}; another=test' + + result = parse_cookie_header(header) + + assert len(result) == 2 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "another" + assert morsel2.value == "test" + + assert "Can not load cookie: Illegal cookie name 'invalid,name'" in caplog.text + + +def test_parse_cookie_header_empty_key_in_fallback( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that fallback parser logs warning for empty cookie names.""" + header = 'normal=value; ={"malformed":"json"}; another=test' + + result = parse_cookie_header(header) + + assert len(result) == 2 + + name1, morsel1 = result[0] + assert name1 == "normal" + assert morsel1.value == "value" + + name2, morsel2 = result[1] + assert name2 == "another" + assert morsel2.value == "test" + + assert "Can not load cookie: Illegal cookie name ''" in caplog.text + + @pytest.mark.parametrize( ("input_str", "expected"), [ From baf646f642b87a8d770129eae1d7b274f695f94b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 13:14:00 -0500 Subject: [PATCH 18/21] [PR #11726/6cffcfd backport][3.14] Fix WebSocket compressed sends to be cancellation safe (#11732) --- CHANGES/11725.bugfix.rst | 1 + aiohttp/_websocket/writer.py | 196 +++++++++++++++++++++++---------- aiohttp/compression_utils.py | 42 ++++--- docs/spelling_wordlist.txt | 1 + tests/conftest.py | 28 ++++- tests/test_websocket_writer.py | 132 +++++++++++++++++++++- 6 files changed, 328 insertions(+), 72 deletions(-) create mode 100644 CHANGES/11725.bugfix.rst diff --git a/CHANGES/11725.bugfix.rst b/CHANGES/11725.bugfix.rst new file mode 100644 index 00000000000..e78fc054230 --- /dev/null +++ b/CHANGES/11725.bugfix.rst @@ -0,0 +1 @@ +Fixed WebSocket compressed sends to be cancellation safe. Tasks are now shielded during compression to prevent compressor state corruption. This ensures that the stateful compressor remains consistent even when send operations are cancelled -- by :user:`bdraco`. diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 6307dba3670..0d5f56f4b81 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -2,8 +2,9 @@ import asyncio import random +import sys from functools import partial -from typing import Any, Final +from typing import Final, Optional, Set from ..base_protocol import BaseProtocol from ..client_exceptions import ClientConnectionResetError @@ -22,14 +23,18 @@ DEFAULT_LIMIT: Final[int] = 2**16 +# WebSocket opcode boundary: opcodes 0-7 are data frames, 8-15 are control frames +# Control frames (ping, pong, close) are never compressed +WS_CONTROL_FRAME_OPCODE: Final[int] = 8 + # For websockets, keeping latency low is extremely important as implementations -# generally expect to be able to send and receive messages quickly. We use a -# larger chunk size than the default to reduce the number of executor calls -# since the executor is a significant source of latency and overhead when -# the chunks are small. A size of 5KiB was chosen because it is also the -# same value python-zlib-ng choose to use as the threshold to release the GIL. +# generally expect to be able to send and receive messages quickly. 
We use a +# larger chunk size to reduce the number of executor calls and avoid task +# creation overhead, since both are significant sources of latency when chunks +# are small. A size of 16KiB was chosen as a balance between avoiding task +# overhead and not blocking the event loop too long with synchronous compression. -WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 +WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 16 * 1024 class WebSocketWriter: @@ -62,7 +67,9 @@ def __init__( self._closing = False self._limit = limit self._output_size = 0 - self._compressobj: Any = None # actually compressobj + self._compressobj: Optional[ZLibCompressor] = None + self._send_lock = asyncio.Lock() + self._background_tasks: Set[asyncio.Task[None]] = set() async def send_frame( self, message: bytes, opcode: int, compress: int | None = None @@ -71,39 +78,57 @@ async def send_frame( if self._closing and not (opcode & WSMsgType.CLOSE): raise ClientConnectionResetError("Cannot write to closing transport") - # RSV are the reserved bits in the frame header. They are used to - # indicate that the frame is using an extension. - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - rsv = 0 - # Only compress larger packets (disabled) - # Does small packet needs to be compressed? - # if self.compress and opcode < 8 and len(message) > 124: - if (compress or self.compress) and opcode < 8: - # RSV1 (rsv = 0x40) is set for compressed frames - # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 - rsv = 0x40 - - if compress: - # Do not set self._compress if compressing is for this frame - compressobj = self._make_compress_obj(compress) - else: # self.compress - if not self._compressobj: - self._compressobj = self._make_compress_obj(self.compress) - compressobj = self._compressobj - - message = ( - await compressobj.compress(message) - + compressobj.flush( - ZLibBackend.Z_FULL_FLUSH - if self.notakeover - else ZLibBackend.Z_SYNC_FLUSH - ) - ).removesuffix(WS_DEFLATE_TRAILING) - # Its critical that we do not return control to the event - # loop until we have finished sending all the compressed - # data. Otherwise we could end up mixing compressed frames - # if there are multiple coroutines compressing data. + if not (compress or self.compress) or opcode >= WS_CONTROL_FRAME_OPCODE: + # Non-compressed frames don't need lock or shield + self._write_websocket_frame(message, opcode, 0) + elif len(message) <= WEBSOCKET_MAX_SYNC_CHUNK_SIZE: + # Small compressed payloads - compress synchronously in event loop + # We need the lock even though sync compression has no await points. + # This prevents small frames from interleaving with large frames that + # compress in the executor, avoiding compressor state corruption. + async with self._send_lock: + self._send_compressed_frame_sync(message, opcode, compress) + else: + # Large compressed frames need shield to prevent corruption + # For large compressed frames, the entire compress+send + # operation must be atomic. If cancelled after compression but + # before send, the compressor state would be advanced but data + # not sent, corrupting subsequent frames. + # Create a task to shield from cancellation + # The lock is acquired inside the shielded task so the entire + # operation (lock + compress + send) completes atomically. 
+ # Use eager_start on Python 3.12+ to avoid scheduling overhead + loop = asyncio.get_running_loop() + coro = self._send_compressed_frame_async_locked(message, opcode, compress) + if sys.version_info >= (3, 12): + send_task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + send_task = loop.create_task(coro) + # Keep a strong reference to prevent garbage collection + self._background_tasks.add(send_task) + send_task.add_done_callback(self._background_tasks.discard) + await asyncio.shield(send_task) + + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. + if self._output_size > self._limit: + self._output_size = 0 + if self.protocol._paused: + await self.protocol._drain_helper() + def _write_websocket_frame(self, message: bytes, opcode: int, rsv: int) -> None: + """ + Write a websocket frame to the transport. + + This method handles frame header construction, masking, and writing to transport. + It does not handle compression or flow control - those are the responsibility + of the caller. + """ msg_length = len(message) use_mask = self.use_mask @@ -146,26 +171,85 @@ async def send_frame( self._output_size += header_len + msg_length - # It is safe to return control to the event loop when using compression - # after this point as we have already sent or buffered all the data. + def _get_compressor(self, compress: int | None) -> ZLibCompressor: + """Get or create a compressor object for the given compression level.""" + if compress: + # Do not set self._compress if compressing is for this frame + return ZLibCompressor( + level=ZLibBackend.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + if not self._compressobj: + self._compressobj = ZLibCompressor( + level=ZLibBackend.Z_BEST_SPEED, + wbits=-self.compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + return self._compressobj - # Once we have written output_size up to the limit, we call the - # drain helper which waits for the transport to be ready to accept - # more data. This is a flow control mechanism to prevent the buffer - # from growing too large. The drain helper will return right away - # if the writer is not paused. - if self._output_size > self._limit: - self._output_size = 0 - if self.protocol._paused: - await self.protocol._drain_helper() + def _send_compressed_frame_sync( + self, message: bytes, opcode: int, compress: int | None + ) -> None: + """ + Synchronous send for small compressed frames. - def _make_compress_obj(self, compress: int) -> ZLibCompressor: - return ZLibCompressor( - level=ZLibBackend.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + This is used for small compressed payloads that compress synchronously in the event loop. + Since there are no await points, this is inherently cancellation-safe. + """ + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. 
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + compressobj = self._get_compressor(compress) + # (0x40) RSV1 is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + self._write_websocket_frame( + ( + compressobj.compress_sync(message) + + compressobj.flush( + ZLibBackend.Z_FULL_FLUSH + if self.notakeover + else ZLibBackend.Z_SYNC_FLUSH + ) + ).removesuffix(WS_DEFLATE_TRAILING), + opcode, + 0x40, ) + async def _send_compressed_frame_async_locked( + self, message: bytes, opcode: int, compress: int | None + ) -> None: + """ + Async send for large compressed frames with lock. + + Acquires the lock and compresses large payloads asynchronously in + the executor. The lock is held for the entire operation to ensure + the compressor state is not corrupted by concurrent sends. + + MUST be run shielded from cancellation. If cancelled after + compression but before sending, the compressor state would be + advanced but data not sent, corrupting subsequent frames. + """ + async with self._send_lock: + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + compressobj = self._get_compressor(compress) + # (0x40) RSV1 is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + self._write_websocket_frame( + ( + await compressobj.compress(message) + + compressobj.flush( + ZLibBackend.Z_FULL_FLUSH + if self.notakeover + else ZLibBackend.Z_SYNC_FLUSH + ) + ).removesuffix(WS_DEFLATE_TRAILING), + opcode, + 0x40, + ) + async def close(self, code: int = 1000, message: bytes | str = b"") -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index dab571f5548..7b222f1b639 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -185,7 +185,6 @@ def __init__( if level is not None: kwargs["level"] = level self._compressor = self._zlib_backend.compressobj(**kwargs) - self._compress_lock = asyncio.Lock() def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) @@ -198,22 +197,37 @@ async def compress(self, data: bytes) -> bytes: If the data size is large than the max_sync_chunk_size, the compression will be done in the executor. Otherwise, the compression will be done in the event loop. + + **WARNING: This method is NOT cancellation-safe when used with flush().** + If this operation is cancelled, the compressor state may be corrupted. + The connection MUST be closed after cancellation to avoid data corruption + in subsequent compress operations. + + For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap + compress() + flush() + send operations in a shield and lock to ensure atomicity. """ - async with self._compress_lock: - # To ensure the stream is consistent in the event - # there are multiple writers, we need to lock - # the compressor so that only one writer can - # compress at a time. 
- if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_running_loop().run_in_executor( - self._executor, self._compressor.compress, data - ) - return self.compress_sync(data) + # For large payloads, offload compression to executor to avoid blocking event loop + should_use_executor = ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ) + if should_use_executor: + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._compressor.compress, data + ) + return self.compress_sync(data) def flush(self, mode: int | None = None) -> bytes: + """Flush the compressor synchronously. + + **WARNING: This method is NOT cancellation-safe when called after compress().** + The flush() operation accesses shared compressor state. If compress() was + cancelled, calling flush() may result in corrupted data. The connection MUST + be closed after compress() cancellation. + + For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap + compress() + flush() + send operations in a shield and lock to ensure atomicity. + """ return self._compressor.flush( mode if mode is not None else self._zlib_backend.Z_FINISH ) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index add105719be..0912c312f6d 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -304,6 +304,7 @@ SocketSocketTransport ssl SSLContext startup +stateful subapplication subclassed subclasses diff --git a/tests/conftest.py b/tests/conftest.py index 1a7be393358..d3c4f237fb1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,9 @@ import socket import ssl import sys -from collections.abc import AsyncIterator, Generator, Iterator +import time +from collections.abc import AsyncIterator, Callable, Generator, Iterator +from concurrent.futures import Future, ThreadPoolExecutor from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory @@ -402,3 +404,27 @@ async def cleanup_payload_pending_file_closes( loop_futures = [f for f in payload._CLOSE_FUTURES if f.get_loop() is loop] if loop_futures: await asyncio.gather(*loop_futures, return_exceptions=True) + + +@pytest.fixture +def slow_executor() -> Iterator[ThreadPoolExecutor]: + """Executor that adds delay to simulate slow operations. + + Useful for testing cancellation and race conditions in compression tests. 
+ """ + + class SlowExecutor(ThreadPoolExecutor): + """Executor that adds delay to operations.""" + + def submit( + self, fn: Callable[..., Any], /, *args: Any, **kwargs: Any + ) -> Future[Any]: + def slow_fn(*args: Any, **kwargs: Any) -> Any: + time.sleep(0.05) # Add delay to simulate slow operation + return fn(*args, **kwargs) + + return super().submit(slow_fn, *args, **kwargs) + + executor = SlowExecutor(max_workers=10) + yield executor + executor.shutdown(wait=True) diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index 34d8b55c16f..d0b1b972a58 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -1,6 +1,8 @@ import asyncio import random from collections.abc import Callable +from concurrent.futures import ThreadPoolExecutor +from contextlib import suppress from typing import Any from unittest import mock @@ -8,6 +10,7 @@ from aiohttp import WSMsgType from aiohttp._websocket.reader import WebSocketDataQueue +from aiohttp.base_protocol import BaseProtocol from aiohttp.compression_utils import ZLibBackend from aiohttp.http import WebSocketReader, WebSocketWriter @@ -27,7 +30,7 @@ def transport(): @pytest.fixture -def writer(protocol, transport): +async def writer(loop, protocol, transport): return WebSocketWriter(protocol, transport, use_mask=False) @@ -135,6 +138,130 @@ async def test_send_compress_text_per_message(protocol, transport) -> None: writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_send_compress_cancelled( + protocol: BaseProtocol, + transport: asyncio.Transport, + slow_executor: ThreadPoolExecutor, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that cancelled compression doesn't corrupt subsequent sends. 
+ + Regression test for https://github.com/aio-libs/aiohttp/issues/11725 + """ + monkeypatch.setattr("aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", 1024) + writer = WebSocketWriter(protocol, transport, compress=15) + loop = asyncio.get_running_loop() + queue = WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) + reader = WebSocketReader(queue, 50000) + + # Replace executor with slow one to make race condition reproducible + writer._compressobj = writer._get_compressor(None) + writer._compressobj._executor = slow_executor + + # Create large data that will trigger executor-based compression + large_data_1 = b"A" * 10000 + large_data_2 = b"B" * 10000 + + # Start first send and cancel it during compression + async def send_and_cancel() -> None: + await writer.send_frame(large_data_1, WSMsgType.BINARY) + + task = asyncio.create_task(send_and_cancel()) + # Give it a moment to start compression + await asyncio.sleep(0.01) + task.cancel() + + # Await task cancellation (expected and intentionally ignored) + with suppress(asyncio.CancelledError): + await task + + # Send second message - this should NOT be corrupted + await writer.send_frame(large_data_2, WSMsgType.BINARY) + + # Verify the second send produced correct data + last_call = writer.transport.write.call_args_list[-1] # type: ignore[attr-defined] + call_bytes = last_call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + assert msg.type is WSMsgType.BINARY + # The data should be all B's, not mixed with A's from the cancelled send + assert msg.data == large_data_2 + + +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_send_compress_multiple_cancelled( + protocol: BaseProtocol, + transport: asyncio.Transport, + slow_executor: ThreadPoolExecutor, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that multiple compressed sends all complete despite cancellation. + + Regression test for https://github.com/aio-libs/aiohttp/issues/11725 + This verifies that once a send operation enters the shield, it completes + even if cancelled. With the lock inside the shield, all tasks that enter + the shield will complete their sends, even while waiting for the lock. 
+ """ + monkeypatch.setattr("aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", 1024) + writer = WebSocketWriter(protocol, transport, compress=15) + loop = asyncio.get_running_loop() + queue = WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) + reader = WebSocketReader(queue, 50000) + + # Replace executor with slow one + writer._compressobj = writer._get_compressor(None) + writer._compressobj._executor = slow_executor + + # Create 5 large messages with different content + messages = [bytes([ord("A") + i]) * 10000 for i in range(5)] + + # Start sending all 5 messages - they'll queue due to the lock + tasks = [ + asyncio.create_task(writer.send_frame(msg, WSMsgType.BINARY)) + for msg in messages + ] + + # Cancel all tasks during execution + # With lock inside shield, all tasks that enter the shield will complete + # even while waiting for the lock + await asyncio.sleep(0.1) # Let tasks enter the shield + for task in tasks: + task.cancel() + + # Collect results + cancelled_count = 0 + for task in tasks: + try: + await task + except asyncio.CancelledError: + cancelled_count += 1 + + # Wait for all background tasks to complete + # (they continue running even after cancellation due to shield) + await asyncio.gather(*writer._background_tasks, return_exceptions=True) + + # All tasks that entered the shield should complete, even if cancelled + # With lock inside shield, all tasks enter shield immediately then wait for lock + sent_count = len(writer.transport.write.call_args_list) # type: ignore[attr-defined] + assert ( + sent_count == 5 + ), "All 5 sends should complete due to shield protecting lock acquisition" + + # Verify all sent messages are correct (no corruption) + for i in range(sent_count): + call = writer.transport.write.call_args_list[i] # type: ignore[attr-defined] + call_bytes = call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + assert msg.type is WSMsgType.BINARY + # Verify the data matches the expected message + expected_byte = bytes([ord("A") + i]) + assert msg.data == expected_byte * 10000, f"Message {i} corrupted" + + @pytest.mark.parametrize( ("max_sync_chunk_size", "payload_point_generator"), ( @@ -197,3 +324,6 @@ async def test_concurrent_messages( # we want to validate that all the bytes are # the same value assert bytes_data == bytes_data[0:1] * char_val + + # Wait for any background tasks to complete + await asyncio.gather(*writer._background_tasks, return_exceptions=True) From 5c75e63b097930be876fceb1c46d7a8d26f5ccaf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 13:14:10 -0500 Subject: [PATCH 19/21] [PR #11726/6cffcfd backport][3.13] Fix WebSocket compressed sends to be cancellation safe (#11731) --- CHANGES/11725.bugfix.rst | 1 + aiohttp/_websocket/writer.py | 196 +++++++++++++++++++++++---------- aiohttp/compression_utils.py | 42 ++++--- docs/spelling_wordlist.txt | 1 + tests/conftest.py | 29 ++++- tests/test_websocket_writer.py | 132 +++++++++++++++++++++- 6 files changed, 329 insertions(+), 72 deletions(-) create mode 100644 CHANGES/11725.bugfix.rst diff --git a/CHANGES/11725.bugfix.rst b/CHANGES/11725.bugfix.rst new file mode 100644 index 00000000000..e78fc054230 --- /dev/null +++ b/CHANGES/11725.bugfix.rst @@ -0,0 +1 @@ +Fixed WebSocket compressed sends to be cancellation safe. Tasks are now shielded during compression to prevent compressor state corruption. 
This ensures that the stateful compressor remains consistent even when send operations are cancelled -- by :user:`bdraco`. diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 19163f9afdf..9604202357c 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -2,8 +2,9 @@ import asyncio import random +import sys from functools import partial -from typing import Any, Final, Optional, Union +from typing import Final, Optional, Set, Union from ..base_protocol import BaseProtocol from ..client_exceptions import ClientConnectionResetError @@ -22,14 +23,18 @@ DEFAULT_LIMIT: Final[int] = 2**16 +# WebSocket opcode boundary: opcodes 0-7 are data frames, 8-15 are control frames +# Control frames (ping, pong, close) are never compressed +WS_CONTROL_FRAME_OPCODE: Final[int] = 8 + # For websockets, keeping latency low is extremely important as implementations -# generally expect to be able to send and receive messages quickly. We use a -# larger chunk size than the default to reduce the number of executor calls -# since the executor is a significant source of latency and overhead when -# the chunks are small. A size of 5KiB was chosen because it is also the -# same value python-zlib-ng choose to use as the threshold to release the GIL. +# generally expect to be able to send and receive messages quickly. We use a +# larger chunk size to reduce the number of executor calls and avoid task +# creation overhead, since both are significant sources of latency when chunks +# are small. A size of 16KiB was chosen as a balance between avoiding task +# overhead and not blocking the event loop too long with synchronous compression. -WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024 +WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 16 * 1024 class WebSocketWriter: @@ -62,7 +67,9 @@ def __init__( self._closing = False self._limit = limit self._output_size = 0 - self._compressobj: Any = None # actually compressobj + self._compressobj: Optional[ZLibCompressor] = None + self._send_lock = asyncio.Lock() + self._background_tasks: Set[asyncio.Task[None]] = set() async def send_frame( self, message: bytes, opcode: int, compress: Optional[int] = None @@ -71,39 +78,57 @@ async def send_frame( if self._closing and not (opcode & WSMsgType.CLOSE): raise ClientConnectionResetError("Cannot write to closing transport") - # RSV are the reserved bits in the frame header. They are used to - # indicate that the frame is using an extension. - # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 - rsv = 0 - # Only compress larger packets (disabled) - # Does small packet needs to be compressed? - # if self.compress and opcode < 8 and len(message) > 124: - if (compress or self.compress) and opcode < 8: - # RSV1 (rsv = 0x40) is set for compressed frames - # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 - rsv = 0x40 - - if compress: - # Do not set self._compress if compressing is for this frame - compressobj = self._make_compress_obj(compress) - else: # self.compress - if not self._compressobj: - self._compressobj = self._make_compress_obj(self.compress) - compressobj = self._compressobj - - message = ( - await compressobj.compress(message) - + compressobj.flush( - ZLibBackend.Z_FULL_FLUSH - if self.notakeover - else ZLibBackend.Z_SYNC_FLUSH - ) - ).removesuffix(WS_DEFLATE_TRAILING) - # Its critical that we do not return control to the event - # loop until we have finished sending all the compressed - # data. 
Otherwise we could end up mixing compressed frames - # if there are multiple coroutines compressing data. + if not (compress or self.compress) or opcode >= WS_CONTROL_FRAME_OPCODE: + # Non-compressed frames don't need lock or shield + self._write_websocket_frame(message, opcode, 0) + elif len(message) <= WEBSOCKET_MAX_SYNC_CHUNK_SIZE: + # Small compressed payloads - compress synchronously in event loop + # We need the lock even though sync compression has no await points. + # This prevents small frames from interleaving with large frames that + # compress in the executor, avoiding compressor state corruption. + async with self._send_lock: + self._send_compressed_frame_sync(message, opcode, compress) + else: + # Large compressed frames need shield to prevent corruption + # For large compressed frames, the entire compress+send + # operation must be atomic. If cancelled after compression but + # before send, the compressor state would be advanced but data + # not sent, corrupting subsequent frames. + # Create a task to shield from cancellation + # The lock is acquired inside the shielded task so the entire + # operation (lock + compress + send) completes atomically. + # Use eager_start on Python 3.12+ to avoid scheduling overhead + loop = asyncio.get_running_loop() + coro = self._send_compressed_frame_async_locked(message, opcode, compress) + if sys.version_info >= (3, 12): + send_task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + send_task = loop.create_task(coro) + # Keep a strong reference to prevent garbage collection + self._background_tasks.add(send_task) + send_task.add_done_callback(self._background_tasks.discard) + await asyncio.shield(send_task) + + # It is safe to return control to the event loop when using compression + # after this point as we have already sent or buffered all the data. + # Once we have written output_size up to the limit, we call the + # drain helper which waits for the transport to be ready to accept + # more data. This is a flow control mechanism to prevent the buffer + # from growing too large. The drain helper will return right away + # if the writer is not paused. + if self._output_size > self._limit: + self._output_size = 0 + if self.protocol._paused: + await self.protocol._drain_helper() + def _write_websocket_frame(self, message: bytes, opcode: int, rsv: int) -> None: + """ + Write a websocket frame to the transport. + + This method handles frame header construction, masking, and writing to transport. + It does not handle compression or flow control - those are the responsibility + of the caller. + """ msg_length = len(message) use_mask = self.use_mask @@ -146,26 +171,85 @@ async def send_frame( self._output_size += header_len + msg_length - # It is safe to return control to the event loop when using compression - # after this point as we have already sent or buffered all the data. 
+ def _get_compressor(self, compress: Optional[int]) -> ZLibCompressor: + """Get or create a compressor object for the given compression level.""" + if compress: + # Do not set self._compress if compressing is for this frame + return ZLibCompressor( + level=ZLibBackend.Z_BEST_SPEED, + wbits=-compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + if not self._compressobj: + self._compressobj = ZLibCompressor( + level=ZLibBackend.Z_BEST_SPEED, + wbits=-self.compress, + max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + ) + return self._compressobj - # Once we have written output_size up to the limit, we call the - # drain helper which waits for the transport to be ready to accept - # more data. This is a flow control mechanism to prevent the buffer - # from growing too large. The drain helper will return right away - # if the writer is not paused. - if self._output_size > self._limit: - self._output_size = 0 - if self.protocol._paused: - await self.protocol._drain_helper() + def _send_compressed_frame_sync( + self, message: bytes, opcode: int, compress: Optional[int] + ) -> None: + """ + Synchronous send for small compressed frames. - def _make_compress_obj(self, compress: int) -> ZLibCompressor: - return ZLibCompressor( - level=ZLibBackend.Z_BEST_SPEED, - wbits=-compress, - max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, + This is used for small compressed payloads that compress synchronously in the event loop. + Since there are no await points, this is inherently cancellation-safe. + """ + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. + # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + compressobj = self._get_compressor(compress) + # (0x40) RSV1 is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + self._write_websocket_frame( + ( + compressobj.compress_sync(message) + + compressobj.flush( + ZLibBackend.Z_FULL_FLUSH + if self.notakeover + else ZLibBackend.Z_SYNC_FLUSH + ) + ).removesuffix(WS_DEFLATE_TRAILING), + opcode, + 0x40, ) + async def _send_compressed_frame_async_locked( + self, message: bytes, opcode: int, compress: Optional[int] + ) -> None: + """ + Async send for large compressed frames with lock. + + Acquires the lock and compresses large payloads asynchronously in + the executor. The lock is held for the entire operation to ensure + the compressor state is not corrupted by concurrent sends. + + MUST be run shielded from cancellation. If cancelled after + compression but before sending, the compressor state would be + advanced but data not sent, corrupting subsequent frames. + """ + async with self._send_lock: + # RSV are the reserved bits in the frame header. They are used to + # indicate that the frame is using an extension. 
+ # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + compressobj = self._get_compressor(compress) + # (0x40) RSV1 is set for compressed frames + # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1 + self._write_websocket_frame( + ( + await compressobj.compress(message) + + compressobj.flush( + ZLibBackend.Z_FULL_FLUSH + if self.notakeover + else ZLibBackend.Z_SYNC_FLUSH + ) + ).removesuffix(WS_DEFLATE_TRAILING), + opcode, + 0x40, + ) + async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index c97abd0f822..c51fc524f98 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -185,7 +185,6 @@ def __init__( if level is not None: kwargs["level"] = level self._compressor = self._zlib_backend.compressobj(**kwargs) - self._compress_lock = asyncio.Lock() def compress_sync(self, data: bytes) -> bytes: return self._compressor.compress(data) @@ -198,22 +197,37 @@ async def compress(self, data: bytes) -> bytes: If the data size is large than the max_sync_chunk_size, the compression will be done in the executor. Otherwise, the compression will be done in the event loop. + + **WARNING: This method is NOT cancellation-safe when used with flush().** + If this operation is cancelled, the compressor state may be corrupted. + The connection MUST be closed after cancellation to avoid data corruption + in subsequent compress operations. + + For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap + compress() + flush() + send operations in a shield and lock to ensure atomicity. """ - async with self._compress_lock: - # To ensure the stream is consistent in the event - # there are multiple writers, we need to lock - # the compressor so that only one writer can - # compress at a time. - if ( - self._max_sync_chunk_size is not None - and len(data) > self._max_sync_chunk_size - ): - return await asyncio.get_running_loop().run_in_executor( - self._executor, self._compressor.compress, data - ) - return self.compress_sync(data) + # For large payloads, offload compression to executor to avoid blocking event loop + should_use_executor = ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ) + if should_use_executor: + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._compressor.compress, data + ) + return self.compress_sync(data) def flush(self, mode: Optional[int] = None) -> bytes: + """Flush the compressor synchronously. + + **WARNING: This method is NOT cancellation-safe when called after compress().** + The flush() operation accesses shared compressor state. If compress() was + cancelled, calling flush() may result in corrupted data. The connection MUST + be closed after compress() cancellation. + + For cancellation-safe compression (e.g., WebSocket), the caller MUST wrap + compress() + flush() + send operations in a shield and lock to ensure atomicity. 
+ """ return self._compressor.flush( mode if mode is not None else self._zlib_backend.Z_FINISH ) diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index add105719be..0912c312f6d 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -304,6 +304,7 @@ SocketSocketTransport ssl SSLContext startup +stateful subapplication subclassed subclasses diff --git a/tests/conftest.py b/tests/conftest.py index a4dba76efa6..6d91d08f10a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,10 +5,13 @@ import socket import ssl import sys +import time +from collections.abc import AsyncIterator, Callable, Iterator +from concurrent.futures import Future, ThreadPoolExecutor from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, AsyncIterator, Generator, Iterator +from typing import Any, Generator from unittest import mock from uuid import uuid4 @@ -401,3 +404,27 @@ async def cleanup_payload_pending_file_closes( loop_futures = [f for f in payload._CLOSE_FUTURES if f.get_loop() is loop] if loop_futures: await asyncio.gather(*loop_futures, return_exceptions=True) + + +@pytest.fixture +def slow_executor() -> Iterator[ThreadPoolExecutor]: + """Executor that adds delay to simulate slow operations. + + Useful for testing cancellation and race conditions in compression tests. + """ + + class SlowExecutor(ThreadPoolExecutor): + """Executor that adds delay to operations.""" + + def submit( + self, fn: Callable[..., Any], /, *args: Any, **kwargs: Any + ) -> Future[Any]: + def slow_fn(*args: Any, **kwargs: Any) -> Any: + time.sleep(0.05) # Add delay to simulate slow operation + return fn(*args, **kwargs) + + return super().submit(slow_fn, *args, **kwargs) + + executor = SlowExecutor(max_workers=10) + yield executor + executor.shutdown(wait=True) diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index a985acfb7f6..6ec5aecb2a6 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -1,5 +1,7 @@ import asyncio import random +from concurrent.futures import ThreadPoolExecutor +from contextlib import suppress from typing import Any, Callable from unittest import mock @@ -7,6 +9,7 @@ from aiohttp import WSMsgType from aiohttp._websocket.reader import WebSocketDataQueue +from aiohttp.base_protocol import BaseProtocol from aiohttp.compression_utils import ZLibBackend from aiohttp.http import WebSocketReader, WebSocketWriter @@ -26,7 +29,7 @@ def transport(): @pytest.fixture -def writer(protocol, transport): +async def writer(loop, protocol, transport): return WebSocketWriter(protocol, transport, use_mask=False) @@ -134,6 +137,130 @@ async def test_send_compress_text_per_message(protocol, transport) -> None: writer.transport.write.assert_called_with(b"\xc1\x06*I\xad(\x01\x00") # type: ignore[attr-defined] +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_send_compress_cancelled( + protocol: BaseProtocol, + transport: asyncio.Transport, + slow_executor: ThreadPoolExecutor, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that cancelled compression doesn't corrupt subsequent sends. 
+ + Regression test for https://github.com/aio-libs/aiohttp/issues/11725 + """ + monkeypatch.setattr("aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", 1024) + writer = WebSocketWriter(protocol, transport, compress=15) + loop = asyncio.get_running_loop() + queue = WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) + reader = WebSocketReader(queue, 50000) + + # Replace executor with slow one to make race condition reproducible + writer._compressobj = writer._get_compressor(None) + writer._compressobj._executor = slow_executor + + # Create large data that will trigger executor-based compression + large_data_1 = b"A" * 10000 + large_data_2 = b"B" * 10000 + + # Start first send and cancel it during compression + async def send_and_cancel() -> None: + await writer.send_frame(large_data_1, WSMsgType.BINARY) + + task = asyncio.create_task(send_and_cancel()) + # Give it a moment to start compression + await asyncio.sleep(0.01) + task.cancel() + + # Await task cancellation (expected and intentionally ignored) + with suppress(asyncio.CancelledError): + await task + + # Send second message - this should NOT be corrupted + await writer.send_frame(large_data_2, WSMsgType.BINARY) + + # Verify the second send produced correct data + last_call = writer.transport.write.call_args_list[-1] # type: ignore[attr-defined] + call_bytes = last_call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + assert msg.type is WSMsgType.BINARY + # The data should be all B's, not mixed with A's from the cancelled send + assert msg.data == large_data_2 + + +@pytest.mark.usefixtures("parametrize_zlib_backend") +async def test_send_compress_multiple_cancelled( + protocol: BaseProtocol, + transport: asyncio.Transport, + slow_executor: ThreadPoolExecutor, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that multiple compressed sends all complete despite cancellation. + + Regression test for https://github.com/aio-libs/aiohttp/issues/11725 + This verifies that once a send operation enters the shield, it completes + even if cancelled. With the lock inside the shield, all tasks that enter + the shield will complete their sends, even while waiting for the lock. 
+ """ + monkeypatch.setattr("aiohttp._websocket.writer.WEBSOCKET_MAX_SYNC_CHUNK_SIZE", 1024) + writer = WebSocketWriter(protocol, transport, compress=15) + loop = asyncio.get_running_loop() + queue = WebSocketDataQueue(mock.Mock(_reading_paused=False), 2**16, loop=loop) + reader = WebSocketReader(queue, 50000) + + # Replace executor with slow one + writer._compressobj = writer._get_compressor(None) + writer._compressobj._executor = slow_executor + + # Create 5 large messages with different content + messages = [bytes([ord("A") + i]) * 10000 for i in range(5)] + + # Start sending all 5 messages - they'll queue due to the lock + tasks = [ + asyncio.create_task(writer.send_frame(msg, WSMsgType.BINARY)) + for msg in messages + ] + + # Cancel all tasks during execution + # With lock inside shield, all tasks that enter the shield will complete + # even while waiting for the lock + await asyncio.sleep(0.1) # Let tasks enter the shield + for task in tasks: + task.cancel() + + # Collect results + cancelled_count = 0 + for task in tasks: + try: + await task + except asyncio.CancelledError: + cancelled_count += 1 + + # Wait for all background tasks to complete + # (they continue running even after cancellation due to shield) + await asyncio.gather(*writer._background_tasks, return_exceptions=True) + + # All tasks that entered the shield should complete, even if cancelled + # With lock inside shield, all tasks enter shield immediately then wait for lock + sent_count = len(writer.transport.write.call_args_list) # type: ignore[attr-defined] + assert ( + sent_count == 5 + ), "All 5 sends should complete due to shield protecting lock acquisition" + + # Verify all sent messages are correct (no corruption) + for i in range(sent_count): + call = writer.transport.write.call_args_list[i] # type: ignore[attr-defined] + call_bytes = call[0][0] + result, _ = reader.feed_data(call_bytes) + assert result is False + msg = await queue.read() + assert msg.type is WSMsgType.BINARY + # Verify the data matches the expected message + expected_byte = bytes([ord("A") + i]) + assert msg.data == expected_byte * 10000, f"Message {i} corrupted" + + @pytest.mark.parametrize( ("max_sync_chunk_size", "payload_point_generator"), ( @@ -196,3 +323,6 @@ async def test_concurrent_messages( # we want to validate that all the bytes are # the same value assert bytes_data == bytes_data[0:1] * char_val + + # Wait for any background tasks to complete + await asyncio.gather(*writer._background_tasks, return_exceptions=True) From 91547dfe5ad99184ce50a7c039bf4d8844b240b4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 28 Oct 2025 14:11:38 -0500 Subject: [PATCH 20/21] Release 3.13.2 (#11733) --- CHANGES.rst | 34 ++++++++++++++++++++++++++++++++++ CHANGES/11632.bugfix.rst | 1 - CHANGES/11713.bugfix.rst | 1 - CHANGES/11714.bugfix.rst | 1 - CHANGES/11725.bugfix.rst | 1 - aiohttp/__init__.py | 2 +- 6 files changed, 35 insertions(+), 5 deletions(-) delete mode 100644 CHANGES/11632.bugfix.rst delete mode 100644 CHANGES/11713.bugfix.rst delete mode 120000 CHANGES/11714.bugfix.rst delete mode 100644 CHANGES/11725.bugfix.rst diff --git a/CHANGES.rst b/CHANGES.rst index a14296b581a..fd193db6959 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,40 @@ .. 
towncrier release notes start +3.13.2 (2025-10-28) +=================== + +Bug fixes +--------- + +- Fixed cookie parser to continue parsing subsequent cookies when encountering a malformed cookie that fails regex validation, such as Google's ``g_state`` cookie with unescaped quotes -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11632`. + + + +- Fixed loading netrc credentials from the default :file:`~/.netrc` (:file:`~/_netrc` on Windows) location when the :envvar:`NETRC` environment variable is not set -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11713`, :issue:`11714`. + + + +- Fixed WebSocket compressed sends to be cancellation safe. Tasks are now shielded during compression to prevent compressor state corruption. This ensures that the stateful compressor remains consistent even when send operations are cancelled -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11725`. + + + + +---- + + 3.13.1 (2025-10-17) =================== diff --git a/CHANGES/11632.bugfix.rst b/CHANGES/11632.bugfix.rst deleted file mode 100644 index c07bfb2b1f7..00000000000 --- a/CHANGES/11632.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed cookie parser to continue parsing subsequent cookies when encountering a malformed cookie that fails regex validation, such as Google's ``g_state`` cookie with unescaped quotes -- by :user:`bdraco`. diff --git a/CHANGES/11713.bugfix.rst b/CHANGES/11713.bugfix.rst deleted file mode 100644 index dbb45a5254f..00000000000 --- a/CHANGES/11713.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed loading netrc credentials from the default :file:`~/.netrc` (:file:`~/_netrc` on Windows) location when the :envvar:`NETRC` environment variable is not set -- by :user:`bdraco`. diff --git a/CHANGES/11714.bugfix.rst b/CHANGES/11714.bugfix.rst deleted file mode 120000 index 5a506f1ded3..00000000000 --- a/CHANGES/11714.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -11713.bugfix.rst \ No newline at end of file diff --git a/CHANGES/11725.bugfix.rst b/CHANGES/11725.bugfix.rst deleted file mode 100644 index e78fc054230..00000000000 --- a/CHANGES/11725.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed WebSocket compressed sends to be cancellation safe. Tasks are now shielded during compression to prevent compressor state corruption. This ensures that the stateful compressor remains consistent even when send operations are cancelled -- by :user:`bdraco`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 443b05ff275..396d5024dd6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.1.dev0" +__version__ = "3.13.2" from typing import TYPE_CHECKING, Tuple From e1aec0ac94277a8b67092293aeac3c19e17fdd86 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Tue, 28 Oct 2025 22:48:34 +0100 Subject: [PATCH 21/21] Move dependency metadata from `setup.cfg` to `pyproject.toml` PR #11643. This is a follow-up to #9951 implementing PEP 621. 
--- .github/workflows/ci-cd.yml | 2 +- CHANGES/11643.packaging.rst | 2 ++ Makefile | 2 +- pyproject.toml | 19 +++++++++++++++++-- requirements/runtime-deps.in | 6 +++--- requirements/sync-direct-runtime-deps.py | 22 ++++++++++++++-------- setup.cfg | 17 ----------------- 7 files changed, 38 insertions(+), 32 deletions(-) create mode 100644 CHANGES/11643.packaging.rst diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 2e2b84d788a..a600bf0cf10 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -41,7 +41,7 @@ jobs: submodules: true - name: >- Verify that `requirements/runtime-deps.in` - is in sync with `setup.cfg` + is in sync with `pyproject.toml` run: | set -eEuo pipefail make sync-direct-runtime-deps diff --git a/CHANGES/11643.packaging.rst b/CHANGES/11643.packaging.rst new file mode 100644 index 00000000000..8ef91a18788 --- /dev/null +++ b/CHANGES/11643.packaging.rst @@ -0,0 +1,2 @@ +Moved dependency metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` +-- by :user:`cdce8p`. diff --git a/Makefile b/Makefile index 6531d435f22..7fa6aa0d437 100644 --- a/Makefile +++ b/Makefile @@ -184,5 +184,5 @@ install-dev: .develop .PHONY: sync-direct-runtime-deps sync-direct-runtime-deps: - @echo Updating 'requirements/runtime-deps.in' from 'setup.cfg'... >&2 + @echo Updating 'requirements/runtime-deps.in' from 'pyproject.toml'... >&2 @python requirements/sync-direct-runtime-deps.py diff --git a/pyproject.toml b/pyproject.toml index 1b6e0ca3279..8b707ddc4cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,12 +32,27 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP", ] requires-python = ">= 3.10" +dependencies = [ + "aiohappyeyeballs >= 2.5.0", + "aiosignal >= 1.4.0", + "async-timeout >= 4.0, < 6.0 ; python_version < '3.11'", + "frozenlist >= 1.1.1", + "multidict >=4.5, < 7.0", + "propcache >= 0.2.0", + "yarl >= 1.17.0, < 2.0", +] dynamic = [ - "dependencies", - "optional-dependencies", "version", ] +[project.optional-dependencies] +speedups = [ + "aiodns >= 3.3.0", + "Brotli; platform_python_implementation == 'CPython'", + "brotlicffi; platform_python_implementation != 'CPython'", + "backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14'", +] + [[project.maintainers]] name = "aiohttp team" email = "team@aiohttp.org" diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in index f6c6f2e8caa..0be3bb7f98f 100644 --- a/requirements/runtime-deps.in +++ b/requirements/runtime-deps.in @@ -1,10 +1,10 @@ -# Extracted from `setup.cfg` via `make sync-direct-runtime-deps` +# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps` aiodns >= 3.3.0 aiohappyeyeballs >= 2.5.0 aiosignal >= 1.4.0 -async-timeout >= 4.0, < 6.0 ; python_version < "3.11" -backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" +async-timeout >= 4.0, < 6.0 ; python_version < '3.11' +backports.zstd; platform_python_implementation == 'CPython' and python_version < '3.14' Brotli; platform_python_implementation == 'CPython' brotlicffi; platform_python_implementation != 'CPython' frozenlist >= 1.1.1 diff --git a/requirements/sync-direct-runtime-deps.py b/requirements/sync-direct-runtime-deps.py index adc28bdd287..dbe445383c1 100755 --- a/requirements/sync-direct-runtime-deps.py +++ b/requirements/sync-direct-runtime-deps.py @@ -1,16 +1,22 @@ #!/usr/bin/env python -"""Sync direct runtime dependencies from setup.cfg to runtime-deps.in.""" +"""Sync direct runtime dependencies from 
pyproject.toml to runtime-deps.in.""" -from configparser import ConfigParser +import sys from pathlib import Path -cfg = ConfigParser() -cfg.read(Path("setup.cfg")) -reqs = cfg["options"]["install_requires"] + cfg.items("options.extras_require")[0][1] -reqs = sorted(reqs.split("\n"), key=str.casefold) -reqs.remove("") +if sys.version_info >= (3, 11): + import tomllib +else: + raise RuntimeError("Use Python 3.11+ to run 'make sync-direct-runtime-deps'") + +data = tomllib.loads(Path("pyproject.toml").read_text()) +reqs = ( + data["project"]["dependencies"] + + data["project"]["optional-dependencies"]["speedups"] +) +reqs = sorted(reqs, key=str.casefold) with open(Path("requirements", "runtime-deps.in"), "w") as outfile: - header = "# Extracted from `setup.cfg` via `make sync-direct-runtime-deps`\n\n" + header = "# Extracted from `pyproject.toml` via `make sync-direct-runtime-deps`\n\n" outfile.write(header) outfile.write("\n".join(reqs) + "\n") diff --git a/setup.cfg b/setup.cfg index 4f599406df0..dce66515bad 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,20 +1,3 @@ -[options] -install_requires = - aiohappyeyeballs >= 2.5.0 - aiosignal >= 1.4.0 - async-timeout >= 4.0, < 6.0 ; python_version < "3.11" - frozenlist >= 1.1.1 - multidict >=4.5, < 7.0 - propcache >= 0.2.0 - yarl >= 1.17.0, < 2.0 - -[options.extras_require] -speedups = - aiodns >= 3.3.0 - Brotli; platform_python_implementation == 'CPython' - brotlicffi; platform_python_implementation != 'CPython' - backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" - [pep8] max-line-length=79
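
The writer changes in PATCH 19 above hinge on one pattern: the send lock is acquired inside a task that the caller awaits through asyncio.shield(), so the lock + compress + send sequence always runs to completion even when the caller is cancelled, and the stateful compressor is never left half-advanced. Below is a minimal, self-contained sketch of that pattern; SketchWriter and its helpers are illustrative stand-ins, not aiohttp API, and the sketch omits the eager_start=True optimization the real code applies on Python 3.12+.

import asyncio
import zlib


class SketchWriter:
    """Illustrative stand-in for the shielded, locked send path."""

    def __init__(self) -> None:
        self._send_lock = asyncio.Lock()
        # Stateful raw-deflate compressor, analogous to the
        # per-connection compressor in the websocket writer.
        self._compressor = zlib.compressobj(wbits=-15)
        self._background_tasks: set[asyncio.Task[None]] = set()
        self.sent: list[bytes] = []

    async def _compress_and_send_locked(self, payload: bytes) -> None:
        # The lock is acquired *inside* the shielded task, so the whole
        # lock + compress + send sequence is atomic under cancellation.
        async with self._send_lock:
            data = self._compressor.compress(payload)
            data += self._compressor.flush(zlib.Z_SYNC_FLUSH)
            self.sent.append(data)  # stands in for transport.write()

    async def send(self, payload: bytes) -> None:
        task = asyncio.ensure_future(self._compress_and_send_locked(payload))
        # Keep a strong reference so the task is not garbage collected early.
        self._background_tasks.add(task)
        task.add_done_callback(self._background_tasks.discard)
        # shield() lets the *caller* be cancelled while the inner task
        # keeps running, so the compressor state stays consistent.
        await asyncio.shield(task)


async def main() -> None:
    writer = SketchWriter()
    caller = asyncio.create_task(writer.send(b"A" * 10_000))
    await asyncio.sleep(0)  # let the caller reach the shield
    caller.cancel()
    try:
        await caller
    except asyncio.CancelledError:
        pass  # the caller is cancelled, but the shielded task is not
    # Wait for the shielded task to finish, as the real tests do.
    await asyncio.gather(*writer._background_tasks, return_exceptions=True)
    await writer.send(b"B" * 10_000)
    assert len(writer.sent) == 2  # both frames produced, none lost


asyncio.run(main())

Running the sketch shows both payloads are fully compressed and "sent" even though the first caller was cancelled mid-operation, which is the same guarantee test_send_compress_cancelled and test_send_compress_multiple_cancelled assert above.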