From 8cf01adc8c8dbf706e4cd33bf89fd5195f638715 Mon Sep 17 00:00:00 2001
From: Gabriel Esteban
Date: Mon, 26 Sep 2022 20:13:52 +0200
Subject: [PATCH 01/70] [3.8] Fix cookie handling (#6638) (#6974)
* Fix cookie handling
* Fix cookie handling
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* Update aiohttp/cookiejar.py
Co-authored-by: Sam Bull
Co-authored-by: Bruno Cabral
Co-authored-by: pre-commit-ci[bot]
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Sam Bull
(cherry picked from commit 916b3eecda825cd42415b6f8821c035647baf890)
## What do these changes do?
## Are there changes in behavior for the user?
## Related issue number
## Checklist
- [ ] I think the code is well written
- [ ] Unit tests for the changes exist
- [ ] Documentation reflects the changes
- [ ] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
* The format is <Name> <Surname>.
* Please keep alphabetical order, the file is sorted by names.
- [ ] Add a new news fragment into the `CHANGES` folder
* name it `<issue_id>.<type>` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
* ensure type is one of the following:
* `.feature`: Signifying a new feature.
* `.bugfix`: Signifying a bug fix.
* `.doc`: Signifying a documentation improvement.
* `.removal`: Signifying a deprecation or removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."
Co-authored-by: Bruno Cabral
---
CHANGES/6638.bugfix | 1 +
CONTRIBUTORS.txt | 1 +
aiohttp/cookiejar.py | 32 ++++++++++++++++++--------------
tests/test_cookiejar.py | 22 ++++++++++++++++++++++
4 files changed, 42 insertions(+), 14 deletions(-)
create mode 100644 CHANGES/6638.bugfix
diff --git a/CHANGES/6638.bugfix b/CHANGES/6638.bugfix
new file mode 100644
index 00000000000..8154dcfe3f3
--- /dev/null
+++ b/CHANGES/6638.bugfix
@@ -0,0 +1 @@
+Do not overwrite cookies with same name and domain when the path is different.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index e51f68f6f03..6c2fabbdece 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -61,6 +61,7 @@ Brian Bouterse
Brian C. Lane
Brian Muller
Bruce Merry
+Bruno Souza Cabral
Bryan Kok
Bryce Drennan
Carl George
diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py
index 1ac8854a062..6c88b47e358 100644
--- a/aiohttp/cookiejar.py
+++ b/aiohttp/cookiejar.py
@@ -65,7 +65,9 @@ def __init__(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(loop=loop)
- self._cookies: DefaultDict[str, SimpleCookie[str]] = defaultdict(SimpleCookie)
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie[str]] = defaultdict(
+ SimpleCookie
+ )
self._host_only_cookies: Set[Tuple[str, str]] = set()
self._unsafe = unsafe
self._quote_cookie = quote_cookie
@@ -82,7 +84,7 @@ def __init__(
]
self._treat_as_secure_origin = treat_as_secure_origin
self._next_expiration = next_whole_second()
- self._expirations: Dict[Tuple[str, str], datetime.datetime] = {}
+ self._expirations: Dict[Tuple[str, str, str], datetime.datetime] = {}
# #4515: datetime.max may not be representable on 32-bit platforms
self._max_time = self.MAX_TIME
try:
@@ -110,20 +112,20 @@ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
to_del = []
now = datetime.datetime.now(datetime.timezone.utc)
- for domain, cookie in self._cookies.items():
+ for (domain, path), cookie in self._cookies.items():
for name, morsel in cookie.items():
- key = (domain, name)
+ key = (domain, path, name)
if (
key in self._expirations and self._expirations[key] <= now
) or predicate(morsel):
to_del.append(key)
- for domain, name in to_del:
- key = (domain, name)
- self._host_only_cookies.discard(key)
+ for domain, path, name in to_del:
+ self._host_only_cookies.discard((domain, name))
+ key = (domain, path, name)
if key in self._expirations:
- del self._expirations[(domain, name)]
- self._cookies[domain].pop(name, None)
+ del self._expirations[(domain, path, name)]
+ self._cookies[(domain, path)].pop(name, None)
next_expiration = min(self._expirations.values(), default=self._max_time)
try:
@@ -147,9 +149,11 @@ def __len__(self) -> int:
def _do_expiration(self) -> None:
self.clear(lambda x: False)
- def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
+ def _expire_cookie(
+ self, when: datetime.datetime, domain: str, path: str, name: str
+ ) -> None:
self._next_expiration = min(self._next_expiration, when)
- self._expirations[(domain, name)] = when
+ self._expirations[(domain, path, name)] = when
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
@@ -211,7 +215,7 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No
) + datetime.timedelta(seconds=delta_seconds)
except OverflowError:
max_age_expiration = self._max_time
- self._expire_cookie(max_age_expiration, domain, name)
+ self._expire_cookie(max_age_expiration, domain, path, name)
except ValueError:
cookie["max-age"] = ""
@@ -220,11 +224,11 @@ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> No
if expires:
expire_time = self._parse_date(expires)
if expire_time:
- self._expire_cookie(expire_time, domain, name)
+ self._expire_cookie(expire_time, domain, path, name)
else:
cookie["expires"] = ""
- self._cookies[domain][name] = cookie
+ self._cookies[(domain, path)][name] = cookie
self._do_expiration()
diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py
index 54f1f72802a..66f18c31d72 100644
--- a/tests/test_cookiejar.py
+++ b/tests/test_cookiejar.py
@@ -664,6 +664,28 @@ async def make_jar():
# Assert that there is a cookie.
assert len(jar) == 1
+ def test_path_filter_diff_folder_same_name(self) -> None:
+ async def make_jar():
+ return CookieJar(unsafe=True)
+
+ jar = self.loop.run_until_complete(make_jar())
+
+ jar.update_cookies(
+ SimpleCookie("path-cookie=zero; Domain=pathtest.com; Path=/; ")
+ )
+ jar.update_cookies(
+ SimpleCookie("path-cookie=one; Domain=pathtest.com; Path=/one; ")
+ )
+ self.assertEqual(len(jar), 2)
+
+ jar_filtered = jar.filter_cookies(URL("http://pathtest.com/"))
+ self.assertEqual(len(jar_filtered), 1)
+ self.assertEqual(jar_filtered["path-cookie"].value, "zero")
+
+ jar_filtered = jar.filter_cookies(URL("http://pathtest.com/one"))
+ self.assertEqual(len(jar_filtered), 1)
+ self.assertEqual(jar_filtered["path-cookie"].value, "one")
+
async def test_dummy_cookie_jar() -> None:
cookie = SimpleCookie("foo=bar; Domain=example.com;")
From ed04b4da2e0fbb504728064335fc0cdcd52773c6 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Fri, 30 Dec 2022 21:39:17 +0100
Subject: [PATCH 02/70] [PR #7154/283861dd backport][3.8] fixed error in
ContentDisposition doc (#7155)
**This is a backport of PR #7154 as merged into master
(283861dddf7bac50188ad3dd63dda93bccfca4bc).**
## What do these changes do?
Fix small mistake in doc
## Are there changes in behavior for the user?
No.
## Related issue number
https://github.com/aio-libs/aiohttp/issues/7151
Co-authored-by: solarjoe
---
docs/client_reference.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index f95f5c6c781..0281a27e81b 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -2081,7 +2081,7 @@ All exceptions are available as members of *aiohttp* module.
Represent Content-Disposition header
- .. attribute:: value
+ .. attribute:: type
A :class:`str` instance. Value of Content-Disposition header
itself, e.g. ``attachment``.
From 9cde3b47e10b04b9db3bf86611d01132d852c0c7 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Fri, 10 Feb 2023 22:50:49 +0000
Subject: [PATCH 03/70] Update .pre-commit-config.yaml
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a98fbce6854..9cc0e83b2c4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -30,7 +30,7 @@ repos:
hooks:
- id: yesqa
- repo: https://github.com/PyCQA/isort
- rev: '5.9.3'
+ rev: '5.12.0'
hooks:
- id: isort
- repo: https://github.com/psf/black
From ba573e267c0601e97b7caafb7ac9ad4ec7c7d52d Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sat, 11 Feb 2023 00:09:40 +0000
Subject: [PATCH 04/70] [3.8] Fix CI (#7143) (#7200)
---
.github/workflows/ci.yml | 24 ++++++++++++------------
.mypy.ini | 2 ++
aiohttp/helpers.py | 1 +
tests/test_client_request.py | 4 ++--
tests/test_streams.py | 4 ++--
tests/test_web_app.py | 4 ++--
6 files changed, 21 insertions(+), 18 deletions(-)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 05f4eb33ec4..27a2e63215e 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -59,9 +59,9 @@ jobs:
- name: Run linters
run: |
make mypy
- - name: Install libenchant-dev
+ - name: Install libenchant
run: |
- sudo apt install libenchant-dev
+ sudo apt install libenchant-2-dev
- name: Install spell checker
run: |
pip install -r requirements/doc-spelling.txt -c requirements/constraints.txt
@@ -119,36 +119,36 @@ jobs:
matrix:
pyver: [3.6, 3.7, 3.8, 3.9, '3.10']
no-extensions: ['', 'Y']
- os: [ubuntu, macos, windows]
+ os: [ubuntu-20.04, macos-latest, windows-latest]
exclude:
- - os: macos
+ - os: macos-latest
no-extensions: 'Y'
- - os: macos
+ - os: macos-latest
pyver: 3.7
- - os: macos
+ - os: macos-latest
pyver: 3.8
- - os: windows
+ - os: windows-latest
no-extensions: 'Y'
experimental: [false]
include:
- pyver: pypy-3.8
no-extensions: 'Y'
- os: ubuntu
+ os: ubuntu-latest
experimental: false
- - os: macos
+ - os: macos-latest
pyver: "3.11.0-alpha - 3.11.0"
experimental: true
no-extensions: 'Y'
- - os: ubuntu
+ - os: ubuntu-latest
pyver: "3.11.0-alpha - 3.11.0"
experimental: false
no-extensions: 'Y'
- - os: windows
+ - os: windows-latest
pyver: "3.11.0-alpha - 3.11.0"
experimental: true
no-extensions: 'Y'
fail-fast: true
- runs-on: ${{ matrix.os }}-latest
+ runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.experimental }}
steps:
- name: Checkout
diff --git a/.mypy.ini b/.mypy.ini
index 7e474fc0bab..a216570f5cf 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -1,4 +1,6 @@
[mypy]
+# Only for 3.8 branch which is already EOL
+ignore_errors = True
files = aiohttp, examples
check_untyped_defs = True
follow_imports_for_stubs = True
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py
index 0469ee41de5..874ab1ac076 100644
--- a/aiohttp/helpers.py
+++ b/aiohttp/helpers.py
@@ -62,6 +62,7 @@
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
+PY_311 = sys.version_info >= (3, 11)
if sys.version_info < (3, 7):
import idna_ssl
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index c822d0c0206..ea4ebb4af10 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -19,7 +19,7 @@
Fingerprint,
_merge_ssl_params,
)
-from aiohttp.helpers import PY_310
+from aiohttp.helpers import PY_311
from aiohttp.test_utils import make_mocked_coro
@@ -276,7 +276,7 @@ def test_host_header_ipv6_with_port(make_request) -> None:
@pytest.mark.xfail(
- PY_310,
+ PY_311,
reason="No idea why ClientRequest() is constructed out of loop but "
"it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
diff --git a/tests/test_streams.py b/tests/test_streams.py
index 5ddc4c94ddc..23f159a0e3b 100644
--- a/tests/test_streams.py
+++ b/tests/test_streams.py
@@ -12,7 +12,7 @@
from re_assert import Matches
from aiohttp import streams
-from aiohttp.helpers import PY_310
+from aiohttp.helpers import PY_311
DATA = b"line1\nline2\nline3\n"
@@ -84,7 +84,7 @@ async def test_create_waiter(self) -> None:
await stream._wait("test")
@pytest.mark.xfail(
- PY_310,
+ PY_311,
reason="No idea why ClientRequest() is constructed out of loop but "
"it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
diff --git a/tests/test_web_app.py b/tests/test_web_app.py
index 7501fba032b..165b3776cef 100644
--- a/tests/test_web_app.py
+++ b/tests/test_web_app.py
@@ -6,7 +6,7 @@
from aiohttp import log, web
from aiohttp.abc import AbstractAccessLogger, AbstractRouter
-from aiohttp.helpers import DEBUG, PY_36, PY_310
+from aiohttp.helpers import DEBUG, PY_36, PY_311
from aiohttp.test_utils import make_mocked_coro
from aiohttp.typedefs import Handler
@@ -40,7 +40,7 @@ async def test_set_loop() -> None:
@pytest.mark.xfail(
- PY_310,
+ PY_311,
reason="No idea why _set_loop() is constructed out of loop "
"but it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
From 565cc2132a4c3667e0601f055cff913526226352 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sat, 11 Feb 2023 00:11:02 +0000
Subject: [PATCH 05/70] Raise upper bound of charset-normalizer
---
setup.cfg | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.cfg b/setup.cfg
index cd279a1fc90..2b19c5283c6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -49,7 +49,7 @@ include_package_data = True
install_requires =
attrs >= 17.3.0
- charset-normalizer >=2.0, < 3.0
+ charset-normalizer >=2.0, < 4.0
multidict >=4.5, < 7.0
async_timeout >= 4.0.0a3, < 5.0
asynctest == 0.13.0; python_version<"3.8"
From 28854a4743cb367351397bd0a8b38469f28f369a Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sat, 11 Feb 2023 00:53:35 +0000
Subject: [PATCH 06/70] =?UTF-8?q?Fix=20ConnectionResetError=20not=20being?=
=?UTF-8?q?=20raised=20when=20the=20transport=20is=20close=E2=80=A6=20(#71?=
=?UTF-8?q?99)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: J. Nick Koston
---
CHANGES/7180.bugfix | 1 +
aiohttp/base_protocol.py | 9 ++++++---
aiohttp/http_writer.py | 10 ++++------
tests/test_base_protocol.py | 8 ++++----
tests/test_client_proto.py | 14 ++++++++++++++
tests/test_http_writer.py | 15 +++++++++++++++
6 files changed, 44 insertions(+), 13 deletions(-)
create mode 100644 CHANGES/7180.bugfix
diff --git a/CHANGES/7180.bugfix b/CHANGES/7180.bugfix
new file mode 100644
index 00000000000..66980638868
--- /dev/null
+++ b/CHANGES/7180.bugfix
@@ -0,0 +1 @@
+``ConnectionResetError`` will always be raised when ``StreamWriter.write`` is called after ``connection_lost`` has been called on the ``BaseProtocol``
diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py
index 8189835e211..4c9f0a752e3 100644
--- a/aiohttp/base_protocol.py
+++ b/aiohttp/base_protocol.py
@@ -18,11 +18,15 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop: asyncio.AbstractEventLoop = loop
self._paused = False
self._drain_waiter: Optional[asyncio.Future[None]] = None
- self._connection_lost = False
self._reading_paused = False
self.transport: Optional[asyncio.Transport] = None
+ @property
+ def connected(self) -> bool:
+ """Return True if the connection is open."""
+ return self.transport is not None
+
def pause_writing(self) -> None:
assert not self._paused
self._paused = True
@@ -59,7 +63,6 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None:
self.transport = tr
def connection_lost(self, exc: Optional[BaseException]) -> None:
- self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
@@ -76,7 +79,7 @@ def connection_lost(self, exc: Optional[BaseException]) -> None:
waiter.set_exception(exc)
async def _drain_helper(self) -> None:
- if self._connection_lost:
+ if not self.connected:
raise ConnectionResetError("Connection lost")
if not self._paused:
return
diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py
index db3d6a04897..73f0f96f0ae 100644
--- a/aiohttp/http_writer.py
+++ b/aiohttp/http_writer.py
@@ -35,7 +35,6 @@ def __init__(
on_headers_sent: _T_OnHeadersSent = None,
) -> None:
self._protocol = protocol
- self._transport = protocol.transport
self.loop = loop
self.length = None
@@ -52,7 +51,7 @@ def __init__(
@property
def transport(self) -> Optional[asyncio.Transport]:
- return self._transport
+ return self._protocol.transport
@property
def protocol(self) -> BaseProtocol:
@@ -71,10 +70,10 @@ def _write(self, chunk: bytes) -> None:
size = len(chunk)
self.buffer_size += size
self.output_size += size
-
- if self._transport is None or self._transport.is_closing():
+ transport = self.transport
+ if not self._protocol.connected or transport is None or transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
- self._transport.write(chunk)
+ transport.write(chunk)
async def write(
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
@@ -159,7 +158,6 @@ async def write_eof(self, chunk: bytes = b"") -> None:
await self.drain()
self._eof = True
- self._transport = None
async def drain(self) -> None:
"""Flush the write buffer.
diff --git a/tests/test_base_protocol.py b/tests/test_base_protocol.py
index f3b966bff54..a16b1f10cb1 100644
--- a/tests/test_base_protocol.py
+++ b/tests/test_base_protocol.py
@@ -45,10 +45,10 @@ async def test_connection_lost_not_paused() -> None:
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
- assert not pr._connection_lost
+ assert pr.connected
pr.connection_lost(None)
assert pr.transport is None
- assert pr._connection_lost
+ assert not pr.connected
async def test_connection_lost_paused_without_waiter() -> None:
@@ -56,11 +56,11 @@ async def test_connection_lost_paused_without_waiter() -> None:
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
- assert not pr._connection_lost
+ assert pr.connected
pr.pause_writing()
pr.connection_lost(None)
assert pr.transport is None
- assert pr._connection_lost
+ assert not pr.connected
async def test_drain_lost() -> None:
diff --git a/tests/test_client_proto.py b/tests/test_client_proto.py
index 85225c77dad..eea2830246a 100644
--- a/tests/test_client_proto.py
+++ b/tests/test_client_proto.py
@@ -134,3 +134,17 @@ async def test_eof_received(loop) -> None:
assert proto._read_timeout_handle is not None
proto.eof_received()
assert proto._read_timeout_handle is None
+
+
+async def test_connection_lost_sets_transport_to_none(loop, mocker) -> None:
+ """Ensure that the transport is set to None when the connection is lost.
+
+ This ensures the writer knows that the connection is closed.
+ """
+ proto = ResponseHandler(loop=loop)
+ proto.connection_made(mocker.Mock())
+ assert proto.transport is not None
+
+ proto.connection_lost(OSError())
+
+ assert proto.transport is None
diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py
index 8ebcfc654a5..5649f32f792 100644
--- a/tests/test_http_writer.py
+++ b/tests/test_http_writer.py
@@ -236,6 +236,21 @@ async def test_write_to_closing_transport(protocol, transport, loop) -> None:
await msg.write(b"After closing")
+async def test_write_to_closed_transport(protocol, transport, loop) -> None:
+ """Test that writing to a closed transport raises ConnectionResetError.
+
+ The StreamWriter checks to see if protocol.transport is None before
+ writing to the transport. If it is None, it raises ConnectionResetError.
+ """
+ msg = http.StreamWriter(protocol, loop)
+
+ await msg.write(b"Before transport close")
+ protocol.transport = None
+
+ with pytest.raises(ConnectionResetError, match="Cannot write to closing transport"):
+ await msg.write(b"After transport closed")
+
+
async def test_drain(protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.drain()
From 33953f110e97eecc707e1402daa8d543f38a189b Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sun, 12 Feb 2023 17:18:55 +0000
Subject: [PATCH 07/70] Release v3.8.4 (#7207)
---
CHANGES.rst | 15 +++++++++++++++
CHANGES/6638.bugfix | 1 -
CHANGES/7180.bugfix | 1 -
aiohttp/__init__.py | 2 +-
4 files changed, 16 insertions(+), 3 deletions(-)
delete mode 100644 CHANGES/6638.bugfix
delete mode 100644 CHANGES/7180.bugfix
diff --git a/CHANGES.rst b/CHANGES.rst
index 744130fb8e5..7713e622772 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,21 @@
.. towncrier release notes start
+3.8.4 (2023-02-12)
+==================
+
+Bugfixes
+--------
+
+- Fixed incorrectly overwriting cookies with the same name and domain, but different path.
+ `#6638 `_
+- Fixed ``ConnectionResetError`` not being raised after client disconnection in SSL environments.
+ `#7180 `_
+
+
+----
+
+
3.8.3 (2022-09-21)
==================
diff --git a/CHANGES/6638.bugfix b/CHANGES/6638.bugfix
deleted file mode 100644
index 8154dcfe3f3..00000000000
--- a/CHANGES/6638.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Do not overwrite cookies with same name and domain when the path is different.
diff --git a/CHANGES/7180.bugfix b/CHANGES/7180.bugfix
deleted file mode 100644
index 66980638868..00000000000
--- a/CHANGES/7180.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-``ConnectionResetError`` will always be raised when ``StreamWriter.write`` is called after ``connection_lost`` has been called on the ``BaseProtocol``
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index eea9a669cf9..34022f0f8c6 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.8.3.post0.dev0"
+__version__ = "3.8.4"
from typing import Tuple
From 29b6f302ad6736f71ef3fb029d4c5bac786a2fb7 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sun, 12 Feb 2023 21:03:12 +0000
Subject: [PATCH 08/70] Post release bump
---
aiohttp/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index 34022f0f8c6..a4fc5ec82d1 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.8.4"
+__version__ = "3.8.4.post0.dev0"
from typing import Tuple
From e71432e3c22a03e15f7f28e08e4c7f2ff55c7630 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 4 Jun 2023 20:57:42 +0200
Subject: [PATCH 09/70] [PR #7309/28438834 backport][3.8] Fix a couple of typos
in the docs (#7310)
Co-authored-by: Dan Davison
---
docs/client_quickstart.rst | 2 +-
docs/index.rst | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst
index 78be177d76c..e84209cec93 100644
--- a/docs/client_quickstart.rst
+++ b/docs/client_quickstart.rst
@@ -68,7 +68,7 @@ endpoints of ``http://httpbin.org`` can be used the following code::
.. note::
Don't create a session per request. Most likely you need a session
- per application which performs all requests altogether.
+ per application which performs all requests together.
More complex cases may require a session per site, e.g. one for
Github and other one for Facebook APIs. Anyway making a session for
diff --git a/docs/index.rst b/docs/index.rst
index bc4d6d4bf1f..0cd3380081a 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -50,8 +50,8 @@ This option is highly recommended:
$ pip install aiodns
-Installing speedups altogether
-------------------------------
+Installing all speedups in one command
+--------------------------------------
The following will get you ``aiohttp`` along with :term:`cchardet`,
:term:`aiodns` and ``Brotli`` in one bundle. No need to type
From 81ba8aa3e508018af541fdf5b2ee816735286cf3 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Fri, 9 Jun 2023 19:05:30 +0000
Subject: [PATCH 10/70] [PR #7316/6f3e7f46 backport][3.8] Update docs (#7317)
**This is a backport of PR #7316 as merged into master
(6f3e7f4687089ecb085a49aadbe372e5408fb9bb).**
Calling main() does not return any loop. Closes #7314
Co-authored-by: Serhii A
---
docs/client_reference.rst | 3 +--
docs/http_request_lifecycle.rst | 6 ++----
docs/index.rst | 3 +--
docs/web_lowlevel.rst | 8 +-------
4 files changed, 5 insertions(+), 15 deletions(-)
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index 0281a27e81b..181f4c3d13e 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -32,8 +32,7 @@ Usage example::
html = await fetch(client)
print(html)
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
The client session supports the context manager protocol for self closing.
diff --git a/docs/http_request_lifecycle.rst b/docs/http_request_lifecycle.rst
index e14fb03de5f..22f6fbb8e30 100644
--- a/docs/http_request_lifecycle.rst
+++ b/docs/http_request_lifecycle.rst
@@ -77,8 +77,7 @@ So you are expected to reuse a session object and make many requests from it. Fo
html = await response.text()
print(html)
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
Can become this:
@@ -98,8 +97,7 @@ Can become this:
html = await fetch(session, 'http://python.org')
print(html)
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
On more complex code bases, you can even create a central registry to hold the session object from anywhere in the code, or a higher level ``Client`` class that holds a reference to it.
diff --git a/docs/index.rst b/docs/index.rst
index 0cd3380081a..172ac9b7dd7 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -83,8 +83,7 @@ Client example
html = await response.text()
print("Body:", html[:15], "...")
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
This prints:
diff --git a/docs/web_lowlevel.rst b/docs/web_lowlevel.rst
index dc81634ee3f..06c3265eb12 100644
--- a/docs/web_lowlevel.rst
+++ b/docs/web_lowlevel.rst
@@ -69,13 +69,7 @@ The following code demonstrates very trivial usage example::
await asyncio.sleep(100*3600)
- loop = asyncio.get_event_loop()
-
- try:
- loop.run_until_complete(main())
- except KeyboardInterrupt:
- pass
- loop.close()
+ asyncio.run(main())
In the snippet we have ``handler`` which returns a regular
From b1fbb49b94b81d0fe89f55e28b432a7a1c404ffa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?=
Date: Sat, 8 Jul 2023 00:41:32 +0200
Subject: [PATCH 11/70] [3.8] Make the 3.10 related xfails non-strict (#7178)
---
tests/test_client_request.py | 1 +
tests/test_streams.py | 1 +
tests/test_web_app.py | 1 +
3 files changed, 3 insertions(+)
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index ea4ebb4af10..9eeb933d312 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -280,6 +280,7 @@ def test_host_header_ipv6_with_port(make_request) -> None:
reason="No idea why ClientRequest() is constructed out of loop but "
"it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
+ strict=False,
)
def test_default_loop(loop) -> None:
asyncio.set_event_loop(loop)
diff --git a/tests/test_streams.py b/tests/test_streams.py
index 23f159a0e3b..4de6fba5d74 100644
--- a/tests/test_streams.py
+++ b/tests/test_streams.py
@@ -88,6 +88,7 @@ async def test_create_waiter(self) -> None:
reason="No idea why ClientRequest() is constructed out of loop but "
"it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
+ strict=False,
)
def test_ctor_global_loop(self) -> None:
loop = asyncio.new_event_loop()
diff --git a/tests/test_web_app.py b/tests/test_web_app.py
index 165b3776cef..13da52d5fca 100644
--- a/tests/test_web_app.py
+++ b/tests/test_web_app.py
@@ -44,6 +44,7 @@ async def test_set_loop() -> None:
reason="No idea why _set_loop() is constructed out of loop "
"but it calls `asyncio.get_event_loop()`",
raises=DeprecationWarning,
+ strict=False,
)
def test_set_loop_default_loop() -> None:
loop = asyncio.new_event_loop()
From 9a41831b1f352c6d441a38105d8821ec5b7c9cb0 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko
Date: Sat, 8 Jul 2023 00:53:22 +0200
Subject: [PATCH 12/70] Upgrade chat links and badges to Matrix (#7345)
(cherry picked from commit fb7a0a176909a8a7926c1eccf3a82023ce2c65f7)
---
README.rst | 12 ++++++------
setup.cfg | 3 ++-
2 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/README.rst b/README.rst
index b283f969a68..5841cbcb542 100644
--- a/README.rst
+++ b/README.rst
@@ -25,13 +25,13 @@ Async http client/server framework
:target: https://docs.aiohttp.org/
:alt: Latest Read The Docs
-.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
- :target: https://aio-libs.discourse.group
- :alt: Discourse status
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
-.. image:: https://badges.gitter.im/Join%20Chat.svg
- :target: https://gitter.im/aio-libs/Lobby
- :alt: Chat on Gitter
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
Key Features
diff --git a/setup.cfg b/setup.cfg
index 2b19c5283c6..47ffbc5d209 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -3,7 +3,8 @@ name = aiohttp
version = attr: aiohttp.__version__
url = https://github.com/aio-libs/aiohttp
project_urls =
- Chat: Gitter = https://gitter.im/aio-libs/Lobby
+ Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org
+ Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org
CI: GitHub Actions = https://github.com/aio-libs/aiohttp/actions?query=workflow%%3ACI
Coverage: codecov = https://codecov.io/github/aio-libs/aiohttp
Docs: Changelog = https://docs.aiohttp.org/en/stable/changes.html
From a4bf0e8b3681fe13218d2a5edf49a608c48b7af0 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sat, 8 Jul 2023 12:57:05 +0000
Subject: [PATCH 13/70] =?UTF-8?q?[PR=20#7348/bf9d753e=20backport][3.8]=20?=
=?UTF-8?q?=F0=9F=90=9B=20Fix=20RST=20in=20changelog=20template=20before?=
=?UTF-8?q?=20links=20(#7349)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
**This is a backport of PR #7348 as merged into master
(bf9d753edc928e7ecbc590c32603ebd3c1fc6282).**
In corner cases, changelog fragments with things like detached link
definitions (example: #7346) cause RST rendering errors. This patch
corrects this by injecting empty lines between the changelog entry
bodies and their reference lists.
Co-authored-by: Sviatoslav Sydorenko
---
CHANGES/.TEMPLATE.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGES/.TEMPLATE.rst b/CHANGES/.TEMPLATE.rst
index bc6016baf5c..a27a1994b53 100644
--- a/CHANGES/.TEMPLATE.rst
+++ b/CHANGES/.TEMPLATE.rst
@@ -12,8 +12,8 @@
{% if definitions[category]['showcontent'] %}
{% for text, values in sections[section][category].items() %}
-- {{ text }}
- {{ values|join(',\n ') }}
+- {{ text + '\n' }}
+ {{ values|join(',\n ') + '\n' }}
{% endfor %}
{% else %}
From 40874103ebfaa1007d47c25ecc4288af873a07cf Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sat, 8 Jul 2023 23:42:35 +0000
Subject: [PATCH 14/70] =?UTF-8?q?[PR=20#7346/346fd202=20backport][3.8]=20?=
=?UTF-8?q?=EF=A3=94=20Bump=20vendored=20llhttp=20to=20v8.1.1=20(#7352)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: Sviatoslav Sydorenko
---
.gitmodules | 2 +-
CHANGES/7346.feature | 5 +++++
docs/spelling_wordlist.txt | 1 +
3 files changed, 7 insertions(+), 1 deletion(-)
create mode 100644 CHANGES/7346.feature
diff --git a/.gitmodules b/.gitmodules
index 4a06d737c9c..1e901ef79f2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
[submodule "vendor/llhttp"]
path = vendor/llhttp
url = https://github.com/nodejs/llhttp.git
- branch = v6.0.6
+ branch = v8.1.1
diff --git a/CHANGES/7346.feature b/CHANGES/7346.feature
new file mode 100644
index 00000000000..9f91e6b7424
--- /dev/null
+++ b/CHANGES/7346.feature
@@ -0,0 +1,5 @@
+Upgrade the vendored copy of llhttp_ to v8.1.1 -- by :user:`webknjaz`.
+
+Thanks to :user:`sethmlarson` for pointing this out!
+
+.. _llhttp: https://llhttp.org
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 47089e25212..af6c4082e70 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -336,6 +336,7 @@ utils
uvloop
uWSGI
vcvarsall
+vendored
waituntil
wakeup
wakeups
From dd8e24e77351df9c0f029be49d3c6d7862706e79 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 9 Jul 2023 01:24:09 +0000
Subject: [PATCH 15/70] [PR #7343/18057581 backport][3.8] Mention encoding in
`yarl.URL` (#7355)
Co-authored-by: Sam Bull
---
docs/client_reference.rst | 12 +++++++++---
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index 181f4c3d13e..8d9abe37eb0 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -346,7 +346,9 @@ The client session supports the context manager protocol for self closing.
:param str method: HTTP method
- :param url: Request URL, :class:`str` or :class:`~yarl.URL`.
+ :param url: Request URL, :class:`~yarl.URL` or :class:`str` that will
+ be encoded with :class:`~yarl.URL` (see :class:`~yarl.URL`
+ to skip encoding).
:param params: Mapping, iterable of tuple of *key*/*value* pairs or
string to be sent as parameters in the query
@@ -661,7 +663,9 @@ The client session supports the context manager protocol for self closing.
Create a websocket connection. Returns a
:class:`ClientWebSocketResponse` object.
- :param url: Websocket server url, :class:`str` or :class:`~yarl.URL`
+ :param url: Websocket server url, :class:`~yarl.URL` or :class:`str` that
+ will be encoded with :class:`~yarl.URL` (see :class:`~yarl.URL`
+ to skip encoding).
:param tuple protocols: Websocket protocols
@@ -826,7 +830,9 @@ certification chaining.
:param str method: HTTP method
- :param url: Requested URL, :class:`str` or :class:`~yarl.URL`
+ :param url: Request URL, :class:`~yarl.URL` or :class:`str` that will
+ be encoded with :class:`~yarl.URL` (see :class:`~yarl.URL`
+ to skip encoding).
:param dict params: Parameters to be sent in the query
string of the new request (optional)
From 8d45f9c99511cd80140d6658bd9c11002c697f1c Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 9 Jul 2023 01:35:28 +0000
Subject: [PATCH 16/70] [PR #7333/3a54d378 backport][3.8] Fix TLS transport is
`None` error (#7357)
Co-authored-by: Sam Bull
Fixes #3355
---
CHANGES/3355.bugfix | 1 +
aiohttp/connector.py | 3 +++
2 files changed, 4 insertions(+)
create mode 100644 CHANGES/3355.bugfix
diff --git a/CHANGES/3355.bugfix b/CHANGES/3355.bugfix
new file mode 100644
index 00000000000..fd002cb00df
--- /dev/null
+++ b/CHANGES/3355.bugfix
@@ -0,0 +1 @@
+Fixed a transport is :data:`None` error -- by :user:`Dreamsorcerer`.
diff --git a/aiohttp/connector.py b/aiohttp/connector.py
index bf40689d81b..2499a2dabe9 100644
--- a/aiohttp/connector.py
+++ b/aiohttp/connector.py
@@ -1121,6 +1121,9 @@ async def _start_tls_connection(
f"[{type_err!s}]"
) from type_err
else:
+ if tls_transport is None:
+ msg = "Failed to start TLS (possibly caused by closing transport)"
+ raise client_error(req.connection_key, OSError(msg))
tls_proto.connection_made(
tls_transport
) # Kick the state machine of the new TLS protocol
From 3577b1e3719d4648fa973dbdec927f78f9df34dd Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 9 Jul 2023 15:19:46 +0200
Subject: [PATCH 17/70] =?UTF-8?q?[PR=20#7359/7911f1e9=20backport][3.8]=20?=
=?UTF-8?q?=EF=A3=94=20Set=20up=20secretless=20publishing=20to=20PyPI=20(#?=
=?UTF-8?q?7360)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: Sviatoslav Sydorenko
---
.github/workflows/{ci.yml => ci-cd.yml} | 32 +++++++++++++++++++++++--
1 file changed, 30 insertions(+), 2 deletions(-)
rename .github/workflows/{ci.yml => ci-cd.yml} (91%)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci-cd.yml
similarity index 91%
rename from .github/workflows/ci.yml
rename to .github/workflows/ci-cd.yml
index 27a2e63215e..b502e51d6b2 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci-cd.yml
@@ -347,9 +347,17 @@ jobs:
deploy:
name: Deploy
- environment: release
needs: [build-tarball, build-wheels]
runs-on: ubuntu-latest
+
+ permissions:
+ contents: write # IMPORTANT: mandatory for making GitHub Releases
+ id-token: write # IMPORTANT: mandatory for trusted publishing & sigstore
+
+ environment:
+ name: pypi
+ url: https://pypi.org/p/aiohttp
+
steps:
- name: Checkout
uses: actions/checkout@v2.4.0
@@ -376,7 +384,27 @@ jobs:
name: aiohttp
version_file: aiohttp/__init__.py
github_token: ${{ secrets.GITHUB_TOKEN }}
- pypi_token: ${{ secrets.PYPI_API_TOKEN }}
dist_dir: dist
fix_issue_regex: "`#(\\d+) `_"
fix_issue_repl: "(#\\1)"
+
+ - name: >-
+ Publish 🐍📦 to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+
+ - name: Sign the dists with Sigstore
+ uses: sigstore/gh-action-sigstore-python@v1.2.3
+ with:
+ inputs: >-
+ ./dist/*.tar.gz
+ ./dist/*.whl
+
+ - name: Upload artifact signatures to GitHub Release
+ # Confusingly, this action also supports updating releases, not
+ # just creating them. This is what we want here, since we've manually
+ # created the release above.
+ uses: softprops/action-gh-release@v1
+ with:
+ # dist/ contains the built packages, which smoketest-artifacts/
+ # contains the signatures and certificates.
+ files: dist/**
From 01d9b70e5477cd746561b52225992d8a2ebde953 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Tue, 18 Jul 2023 13:01:36 +0100
Subject: [PATCH 18/70] [PR #7370/22c264ce backport][3.8] fix: Spelling error
fixed (#7371)
**This is a backport of PR #7370 as merged into master
(22c264ce70c9ff45eb2d21a090a874a4ffcdb469).**
## What do these changes do?
Fixes #7351
## Are there changes in behavior for the user?
## Related issue number
## Checklist
- [x] I think the code is well written
- [x] Unit tests for the changes exist
- [x] Documentation reflects the changes
- [x] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
* The format is <Name> <Surname>.
* Please keep alphabetical order, the file is sorted by names.
- [x] Add a new news fragment into the `CHANGES` folder
* name it `.` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
* ensure type is one of the following:
* `.feature`: Signifying a new feature.
* `.bugfix`: Signifying a bug fix.
* `.doc`: Signifying a documentation improvement.
* `.removal`: Signifying a deprecation or removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."
Co-authored-by: Shamil Abdulaev <112097588+abdulaev-sh-m@users.noreply.github.com>
---
aiohttp/payload.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aiohttp/payload.py b/aiohttp/payload.py
index 625b2eaccec..a2340e2945e 100644
--- a/aiohttp/payload.py
+++ b/aiohttp/payload.py
@@ -423,7 +423,7 @@ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError(
"value argument must support "
- "collections.abc.AsyncIterablebe interface, "
+ "collections.abc.AsyncIterable interface, "
"got {!r}".format(type(value))
)
From f07e9b44b5cb909054a697c8dd447b30dbf8073e Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Tue, 18 Jul 2023 19:20:07 +0100
Subject: [PATCH 19/70] [PR #7373/66e261a5 backport][3.8] Drop azure mention
(#7374)
**This is a backport of PR #7373 as merged into master
(66e261a5a0a3724d7aad2f069c62e78c46ccc36d).**
None
Co-authored-by: Sam Bull
---
docs/index.rst | 3 ---
1 file changed, 3 deletions(-)
diff --git a/docs/index.rst b/docs/index.rst
index 172ac9b7dd7..44c01a7b08c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -140,9 +140,6 @@ Please feel free to file an issue on the `bug tracker
`_ if you have found a bug
or have some suggestion in order to improve the library.
-The library uses `Azure Pipelines `_ for
-Continuous Integration.
-
Dependencies
============
From 9337fb3f2ab2b5f38d7e98a194bde6f7e3d16c40 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Tue, 18 Jul 2023 20:45:53 +0100
Subject: [PATCH 20/70] Fix bump llhttp to v8.1.1 (#7367) (#7377)
(cherry picked from commit 41e2c4c1fdf34d800bb9e4247cd9ebfc6904d9d7)
---
.gitmodules | 2 +-
docs/contributing.rst | 2 ++
tests/test_http_parser.py | 1 +
vendor/README.rst | 23 +++++++++++++++++++++++
vendor/llhttp | 2 +-
5 files changed, 28 insertions(+), 2 deletions(-)
create mode 100644 vendor/README.rst
diff --git a/.gitmodules b/.gitmodules
index 1e901ef79f2..6edb2eea5b2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
[submodule "vendor/llhttp"]
path = vendor/llhttp
url = https://github.com/nodejs/llhttp.git
- branch = v8.1.1
+ branch = v8.x
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 10a4a4bc2d7..b1dfdf01139 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -110,6 +110,8 @@ Install pre-commit hooks:
Congratulations, you are ready to run the test suite!
+.. include:: ../vendor/README.rst
+
Run autoformatter
-----------------
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index cc30629dd0d..950c243ef6b 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -685,6 +685,7 @@ def test_http_response_parser_bad(response) -> None:
response.feed_data(b"HTT/1\r\n\r\n")
+@pytest.mark.skipif(not NO_EXTENSIONS, reason="Behaviour has changed in C parser")
def test_http_response_parser_code_under_100(response) -> None:
msg = response.feed_data(b"HTTP/1.1 99 test\r\n\r\n")[0][0][0]
assert msg.code == 99
diff --git a/vendor/README.rst b/vendor/README.rst
new file mode 100644
index 00000000000..1f10e20cce2
--- /dev/null
+++ b/vendor/README.rst
@@ -0,0 +1,23 @@
+LLHTTP
+------
+
+When building aiohttp from source, there is a pure Python parser used by default.
+For better performance, you may want to build the higher performance C parser.
+
+To build this ``llhttp`` parser, first get/update the submodules (to update to a
+newer release, add ``--remote`` and check the branch in ``.gitmodules``)::
+
+ git submodule update --init --recursive
+
+Then build ``llhttp``::
+
+ cd vendor/llhttp/
+ npm install
+ make
+
+Then build our parser::
+
+ cd -
+ make cythonize
+
+Then you can build or install it with ``python -m build`` or ``pip install -e .``
diff --git a/vendor/llhttp b/vendor/llhttp
index 69d6db20085..7e18596bae8 160000
--- a/vendor/llhttp
+++ b/vendor/llhttp
@@ -1 +1 @@
-Subproject commit 69d6db2008508489d19267a0dcab30602b16fc5b
+Subproject commit 7e18596bae8f63692ded9d3250d5d984fe90dcfb
From 135a45e9d655d56e4ebad78abe84f1cb7b5c62dc Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Tue, 18 Jul 2023 21:43:36 +0100
Subject: [PATCH 21/70] Improve error messages from C parser (#7366) (#7380)
(cherry picked from commit 1a48add026e310bb42b7bd38689b281f6651d127)
---
CHANGES/7366.feature | 1 +
aiohttp/_http_parser.pyx | 12 +++++++++---
aiohttp/http_exceptions.py | 6 ++++--
tests/test_http_exceptions.py | 22 ++++++++++------------
tests/test_http_parser.py | 31 ++++++++++++++++++++++++++-----
5 files changed, 50 insertions(+), 22 deletions(-)
create mode 100644 CHANGES/7366.feature
diff --git a/CHANGES/7366.feature b/CHANGES/7366.feature
new file mode 100644
index 00000000000..8e38f70f898
--- /dev/null
+++ b/CHANGES/7366.feature
@@ -0,0 +1 @@
+Added information to C parser exceptions to show which character caused the error. -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index bebd9894374..4f39dd0c978 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -546,7 +546,13 @@ cdef class HttpParser:
ex = self._last_error
self._last_error = None
else:
- ex = parser_error_from_errno(self._cparser)
+ after = cparser.llhttp_get_error_pos(self._cparser)
+ before = data[:after - self.py_buf.buf]
+ after_b = after.split(b"\n", 1)[0]
+ before = before.rsplit(b"\n", 1)[-1]
+ data = before + after_b
+ pointer = " " * (len(repr(before))-1) + "^"
+ ex = parser_error_from_errno(self._cparser, data, pointer)
self._payload = None
raise ex
@@ -797,7 +803,7 @@ cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
return 0
-cdef parser_error_from_errno(cparser.llhttp_t* parser):
+cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
@@ -829,4 +835,4 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser):
else:
cls = BadHttpMessage
- return cls(desc.decode('latin-1'))
+ return cls("{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer))
diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py
index c885f80f322..b5d16ea4ec1 100644
--- a/aiohttp/http_exceptions.py
+++ b/aiohttp/http_exceptions.py
@@ -1,6 +1,7 @@
"""Low-level http related exceptions."""
+from textwrap import indent
from typing import Optional, Union
from .typedefs import _CIMultiDict
@@ -35,10 +36,11 @@ def __init__(
self.message = message
def __str__(self) -> str:
- return f"{self.code}, message={self.message!r}"
+ msg = indent(self.message, " ")
+ return f"{self.code}, message:\n{msg}"
def __repr__(self) -> str:
- return f"<{self.__class__.__name__}: {self}>"
+ return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
class BadHttpMessage(HttpProcessingError):
diff --git a/tests/test_http_exceptions.py b/tests/test_http_exceptions.py
index 26a5adb3bfc..29d5b91fa29 100644
--- a/tests/test_http_exceptions.py
+++ b/tests/test_http_exceptions.py
@@ -31,13 +31,13 @@ def test_str(self) -> None:
err = http_exceptions.HttpProcessingError(
code=500, message="Internal error", headers={}
)
- assert str(err) == "500, message='Internal error'"
+ assert str(err) == "500, message:\n Internal error"
def test_repr(self) -> None:
err = http_exceptions.HttpProcessingError(
code=500, message="Internal error", headers={}
)
- assert repr(err) == ("")
+ assert repr(err) == ("")
class TestBadHttpMessage:
@@ -60,7 +60,7 @@ def test_pickle(self) -> None:
def test_str(self) -> None:
err = http_exceptions.BadHttpMessage(message="Bad HTTP message", headers={})
- assert str(err) == "400, message='Bad HTTP message'"
+ assert str(err) == "400, message:\n Bad HTTP message"
def test_repr(self) -> None:
err = http_exceptions.BadHttpMessage(message="Bad HTTP message", headers={})
@@ -87,9 +87,8 @@ def test_pickle(self) -> None:
def test_str(self) -> None:
err = http_exceptions.LineTooLong(line="spam", limit="10", actual_size="12")
- assert str(err) == (
- "400, message='Got more than 10 bytes (12) " "when reading spam.'"
- )
+ expected = "400, message:\n Got more than 10 bytes (12) when reading spam."
+ assert str(err) == expected
def test_repr(self) -> None:
err = http_exceptions.LineTooLong(line="spam", limit="10", actual_size="12")
@@ -119,25 +118,24 @@ def test_pickle(self) -> None:
def test_str(self) -> None:
err = http_exceptions.InvalidHeader(hdr="X-Spam")
- assert str(err) == "400, message='Invalid HTTP Header: X-Spam'"
+ assert str(err) == "400, message:\n Invalid HTTP Header: X-Spam"
def test_repr(self) -> None:
err = http_exceptions.InvalidHeader(hdr="X-Spam")
- assert repr(err) == (
- ""
- )
+ expected = ""
+ assert repr(err) == expected
class TestBadStatusLine:
def test_ctor(self) -> None:
err = http_exceptions.BadStatusLine("Test")
assert err.line == "Test"
- assert str(err) == "400, message=\"Bad status line 'Test'\""
+ assert str(err) == "400, message:\n Bad status line 'Test'"
def test_ctor2(self) -> None:
err = http_exceptions.BadStatusLine(b"")
assert err.line == "b''"
- assert str(err) == "400, message='Bad status line \"b\\'\\'\"'"
+ assert str(err) == "400, message:\n Bad status line \"b''\""
def test_pickle(self) -> None:
err = http_exceptions.BadStatusLine("Test")
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 950c243ef6b..ca6c32207ce 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -1,6 +1,7 @@
# Tests for aiohttp/protocol.py
import asyncio
+import re
from typing import Any, List
from unittest import mock
from urllib.parse import quote
@@ -118,6 +119,26 @@ def test_parse_headers(parser: Any) -> None:
assert not msg.upgrade
+@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
+def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None:
+ parser = HttpRequestParserC(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = b"POST / HTTP/1.1\r\nHost: localhost:8080\r\nSet-Cookie: abc\x01def\r\n\r\n"
+ error_detail = re.escape(
+ r""":
+
+ b'Set-Cookie: abc\x01def\r'
+ ^"""
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage, match=error_detail):
+ parser.feed_data(text)
+
+
def test_parse(parser) -> None:
text = b"GET /test HTTP/1.1\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
@@ -429,7 +450,7 @@ def test_max_header_field_size(parser, size) -> None:
name = b"t" * size
text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n"
- match = f"400, message='Got more than 8190 bytes \\({size}\\) when reading"
+ match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(text)
@@ -457,7 +478,7 @@ def test_max_header_value_size(parser, size) -> None:
name = b"t" * size
text = b"GET /test HTTP/1.1\r\n" b"data:" + name + b"\r\n\r\n"
- match = f"400, message='Got more than 8190 bytes \\({size}\\) when reading"
+ match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(text)
@@ -485,7 +506,7 @@ def test_max_header_value_size_continuation(parser, size) -> None:
name = b"T" * (size - 5)
text = b"GET /test HTTP/1.1\r\n" b"data: test\r\n " + name + b"\r\n\r\n"
- match = f"400, message='Got more than 8190 bytes \\({size}\\) when reading"
+ match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(text)
@@ -608,7 +629,7 @@ def test_http_request_parser_bad_version(parser) -> None:
@pytest.mark.parametrize("size", [40965, 8191])
def test_http_request_max_status_line(parser, size) -> None:
path = b"t" * (size - 5)
- match = f"400, message='Got more than 8190 bytes \\({size}\\) when reading"
+ match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
parser.feed_data(b"GET /path" + path + b" HTTP/1.1\r\n\r\n")
@@ -651,7 +672,7 @@ def test_http_response_parser_utf8(response) -> None:
@pytest.mark.parametrize("size", [40962, 8191])
def test_http_response_parser_bad_status_line_too_long(response, size) -> None:
reason = b"t" * (size - 2)
- match = f"400, message='Got more than 8190 bytes \\({size}\\) when reading"
+ match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading"
with pytest.raises(http_exceptions.LineTooLong, match=match):
response.feed_data(b"HTTP/1.1 200 Ok" + reason + b"\r\n\r\n")
From 7c02129567bc4ec59be467b70fc937c82920948c Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko
Date: Wed, 19 Jul 2023 16:27:49 +0200
Subject: [PATCH 22/70] =?UTF-8?q?=EF=A3=94=20Bump=20pypa/cibuildwheel=20to?=
=?UTF-8?q?=20v2.14.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
(cherry picked from commit 985c448dd1f0b2323de8303193487cfd4d6485ce)
---
.github/workflows/ci-cd.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index b502e51d6b2..4c0c0e40b83 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -337,7 +337,7 @@ jobs:
run: |
make cythonize
- name: Build wheels
- uses: pypa/cibuildwheel@v2.10.1
+ uses: pypa/cibuildwheel@v2.14.1
env:
CIBW_ARCHS_MACOS: x86_64 arm64 universal2
- uses: actions/upload-artifact@v2
From 9c13a52c21c23dfdb49ed89418d28a5b116d0681 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko
Date: Sun, 9 Jul 2023 19:59:59 +0200
Subject: [PATCH 23/70] Bump aiohttp to v3.8.5 a security release
Ref: https://github.com/aio-libs/aiohttp/security/advisories/GHSA-45c4-8wx5-qw6w
---
CHANGES.rst | 39 ++++++++++++++++++++++++++++++++++++++
CHANGES/3355.bugfix | 1 -
CHANGES/7346.feature | 5 -----
CHANGES/7366.feature | 1 -
aiohttp/__init__.py | 2 +-
docs/spelling_wordlist.txt | 1 +
6 files changed, 41 insertions(+), 8 deletions(-)
delete mode 100644 CHANGES/3355.bugfix
delete mode 100644 CHANGES/7346.feature
delete mode 100644 CHANGES/7366.feature
diff --git a/CHANGES.rst b/CHANGES.rst
index 7713e622772..d929a0acc0d 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,45 @@
.. towncrier release notes start
+3.8.5 (2023-07-19)
+==================
+
+Security bugfixes
+-----------------
+
+- Upgraded the vendored copy of llhttp_ to v8.1.1 -- by :user:`webknjaz`
+ and :user:`Dreamsorcerer`.
+
+ Thanks to :user:`sethmlarson` for reporting this and providing us with
+ comprehensive reproducer, workarounds and fixing details! For more
+ information, see
+ https://github.com/aio-libs/aiohttp/security/advisories/GHSA-45c4-8wx5-qw6w.
+
+ .. _llhttp: https://llhttp.org
+
+ `#7346 `_
+
+
+Features
+--------
+
+- Added information to C parser exceptions to show which character caused the error. -- by :user:`Dreamsorcerer`
+
+ `#7366 `_
+
+
+Bugfixes
+--------
+
+- Fixed a transport is :data:`None` error -- by :user:`Dreamsorcerer`.
+
+ `#3355 `_
+
+
+
+----
+
+
3.8.4 (2023-02-12)
==================
diff --git a/CHANGES/3355.bugfix b/CHANGES/3355.bugfix
deleted file mode 100644
index fd002cb00df..00000000000
--- a/CHANGES/3355.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fixed a transport is :data:`None` error -- by :user:`Dreamsorcerer`.
diff --git a/CHANGES/7346.feature b/CHANGES/7346.feature
deleted file mode 100644
index 9f91e6b7424..00000000000
--- a/CHANGES/7346.feature
+++ /dev/null
@@ -1,5 +0,0 @@
-Upgrade the vendored copy of llhttp_ to v8.1.1 -- by :user:`webknjaz`.
-
-Thanks to :user:`sethmlarson` for pointing this out!
-
-.. _llhttp: https://llhttp.org
diff --git a/CHANGES/7366.feature b/CHANGES/7366.feature
deleted file mode 100644
index 8e38f70f898..00000000000
--- a/CHANGES/7366.feature
+++ /dev/null
@@ -1 +0,0 @@
-Added information to C parser exceptions to show which character caused the error. -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index a4fc5ec82d1..317a47bbad8 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.8.4.post0.dev0"
+__version__ = "3.8.5"
from typing import Tuple
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index af6c4082e70..1626b180944 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -39,6 +39,7 @@ botocore
brotli
brotlipy
bugfix
+bugfixes
Bugfixes
builtin
BytesIO
From 8d701c3d2811070213c7309f873167b2b8b3655b Mon Sep 17 00:00:00 2001
From: Jan Gosmann
Date: Sat, 22 Jul 2023 15:42:25 +0200
Subject: [PATCH 24/70] Fix PermissionError when loading .netrc (#7237) (#7378)
(#7395)
## What do these changes do?
If no NETRC environment variable is provided and the .netrc path cannot
be accessed due to missing permission, a PermissionError was raised
instead of returning None. See issue #7237. This PR fixes the issue.
If the changes look good, I can also prepare backports.
## Are there changes in behavior for the user?
If the .netrc cannot be accessed due to a permission problem (and the
`NETRC` environment variable is unset), no `PermissionError` will be
raised. Instead it will be silently ignored.
## Related issue number
Fixes #7237
Backport of #7378
(cherry picked from commit 0d2e43bf2a920975a5da4d9295e0ba887080bf5b)
## Checklist
- [x] I think the code is well written
- [x] Unit tests for the changes exist
- [x] Documentation reflects the changes
- [x] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
* The format is <Name> <Surname>.
* Please keep alphabetical order, the file is sorted by names.
- [x] Add a new news fragment into the `CHANGES` folder
* name it `.` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
* ensure type is one of the following:
* `.feature`: Signifying a new feature.
* `.bugfix`: Signifying a bug fix.
* `.doc`: Signifying a documentation improvement.
* `.removal`: Signifying a deprecation or removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."
---
CHANGES/7237.bugfix | 1 +
CONTRIBUTORS.txt | 1 +
aiohttp/helpers.py | 7 +++++--
tests/test_helpers.py | 22 +++++++++++++++++++++-
4 files changed, 28 insertions(+), 3 deletions(-)
create mode 100644 CHANGES/7237.bugfix
diff --git a/CHANGES/7237.bugfix b/CHANGES/7237.bugfix
new file mode 100644
index 00000000000..26f85ea9c95
--- /dev/null
+++ b/CHANGES/7237.bugfix
@@ -0,0 +1 @@
+Fixed ``PermissionError`` when .netrc is unreadable due to permissions.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 6c2fabbdece..8e31468aee6 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -153,6 +153,7 @@ Jake Davis
Jakob Ackermann
Jakub Wilk
Jan Buchar
+Jan Gosmann
Jashandeep Sohi
Jens Steinhauser
Jeonghun Lee
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py
index 874ab1ac076..eb0782ff37f 100644
--- a/aiohttp/helpers.py
+++ b/aiohttp/helpers.py
@@ -3,6 +3,7 @@
import asyncio
import base64
import binascii
+import contextlib
import datetime
import functools
import inspect
@@ -226,8 +227,11 @@ def netrc_from_env() -> Optional[netrc.netrc]:
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
+ netrc_exists = False
+ with contextlib.suppress(OSError):
+ netrc_exists = netrc_path.is_file()
# we couldn't read the file (doesn't exist, permissions, etc.)
- if netrc_env or netrc_path.is_file():
+ if netrc_env or netrc_exists:
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
@@ -742,7 +746,6 @@ def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
class HeadersMixin:
-
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
_content_type: Optional[str] = None
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index d192545de7f..6099afac22a 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -5,6 +5,7 @@
import platform
import tempfile
from math import isclose, modf
+from pathlib import Path
from unittest import mock
from urllib.request import getproxies_environment
@@ -178,7 +179,6 @@ def test_basic_auth_from_not_url() -> None:
class ReifyMixin:
-
reify = NotImplemented
def test_reify(self) -> None:
@@ -763,3 +763,23 @@ def test_repr(self) -> None:
)
def test_parse_http_date(value, expected):
assert parse_http_date(value) == expected
+
+
+@pytest.fixture
+def protected_dir(tmp_path: Path):
+ protected_dir = tmp_path / "protected"
+ protected_dir.mkdir()
+ try:
+ protected_dir.chmod(0o600)
+ yield protected_dir
+ finally:
+ protected_dir.rmdir()
+
+
+def test_netrc_from_home_does_not_raise_if_access_denied(
+ protected_dir: Path, monkeypatch: pytest.MonkeyPatch
+):
+ monkeypatch.setattr(Path, "home", lambda: protected_dir)
+ monkeypatch.delenv("NETRC", raising=False)
+
+ helpers.netrc_from_env()
From 8129d26f3a02faa910ddfa49fc503de692b7bf7c Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 6 Aug 2023 12:55:19 +0000
Subject: [PATCH 25/70] [PR #7480/1fb06bbc backport][3.8] Fix error pointer on
linebreaks (#7482)
**This is a backport of PR #7480 as merged into master
(1fb06bbc10a7bd621c694a33137cf16b23b07a02).**
Fixes #7468.
Co-authored-by: Sam Bull
---
CHANGES/7468.bugfix | 1 +
aiohttp/_http_parser.pyx | 4 ++--
tests/test_http_parser.py | 22 +++++++++++++++++++++-
3 files changed, 24 insertions(+), 3 deletions(-)
create mode 100644 CHANGES/7468.bugfix
diff --git a/CHANGES/7468.bugfix b/CHANGES/7468.bugfix
new file mode 100644
index 00000000000..3f9c256ca0c
--- /dev/null
+++ b/CHANGES/7468.bugfix
@@ -0,0 +1 @@
+Fixed output of parsing errors on `\n`. -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index 4f39dd0c978..8b7d48245d2 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -548,8 +548,8 @@ cdef class HttpParser:
else:
after = cparser.llhttp_get_error_pos(self._cparser)
before = data[:after - self.py_buf.buf]
- after_b = after.split(b"\n", 1)[0]
- before = before.rsplit(b"\n", 1)[-1]
+ after_b = after.split(b"\r\n", 1)[0]
+ before = before.rsplit(b"\r\n", 1)[-1]
data = before + after_b
pointer = " " * (len(repr(before))-1) + "^"
ex = parser_error_from_errno(self._cparser, data, pointer)
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index ca6c32207ce..344d0776633 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -132,7 +132,27 @@ def test_invalid_character(loop: Any, protocol: Any, request: Any) -> None:
error_detail = re.escape(
r""":
- b'Set-Cookie: abc\x01def\r'
+ b'Set-Cookie: abc\x01def'
+ ^"""
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage, match=error_detail):
+ parser.feed_data(text)
+
+
+@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
+def test_invalid_linebreak(loop: Any, protocol: Any, request: Any) -> None:
+ parser = HttpRequestParserC(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = b"GET /world HTTP/1.1\r\nHost: 127.0.0.1\n\r\n"
+ error_detail = re.escape(
+ r""":
+
+ b'Host: 127.0.0.1\n'
^"""
)
with pytest.raises(http_exceptions.BadHttpMessage, match=error_detail):
From f92b27b01fb026c76caa69a88589dcea1f0ceafd Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sun, 6 Aug 2023 16:54:35 +0100
Subject: [PATCH 26/70] Update to LLHTTP 9 (#7485) (#7487)
(cherry picked from commit c0c7508b64951b83ea38d232857cc258159f930b)
---
.gitmodules | 2 +-
CHANGES/7484.misc | 1 +
aiohttp/_cparser.pxd | 38 ++------------------------------------
tests/test_http_parser.py | 15 ++-------------
vendor/README.rst | 2 +-
vendor/llhttp | 2 +-
6 files changed, 8 insertions(+), 52 deletions(-)
create mode 100644 CHANGES/7484.misc
diff --git a/.gitmodules b/.gitmodules
index 6edb2eea5b2..498bf7eb102 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
[submodule "vendor/llhttp"]
path = vendor/llhttp
url = https://github.com/nodejs/llhttp.git
- branch = v8.x
+ branch = main
diff --git a/CHANGES/7484.misc b/CHANGES/7484.misc
new file mode 100644
index 00000000000..ff540ceb709
--- /dev/null
+++ b/CHANGES/7484.misc
@@ -0,0 +1 @@
+Upgraded llhttp parser to v9 -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/_cparser.pxd b/aiohttp/_cparser.pxd
index 165dd61d8a9..9dfb04134d1 100644
--- a/aiohttp/_cparser.pxd
+++ b/aiohttp/_cparser.pxd
@@ -1,13 +1,4 @@
-from libc.stdint cimport (
- int8_t,
- int16_t,
- int32_t,
- int64_t,
- uint8_t,
- uint16_t,
- uint32_t,
- uint64_t,
-)
+from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
cdef extern from "../vendor/llhttp/build/llhttp.h":
@@ -88,30 +79,14 @@ cdef extern from "../vendor/llhttp/build/llhttp.h":
ctypedef llhttp_errno llhttp_errno_t
enum llhttp_flags:
- F_CONNECTION_KEEP_ALIVE,
- F_CONNECTION_CLOSE,
- F_CONNECTION_UPGRADE,
F_CHUNKED,
- F_UPGRADE,
- F_CONTENT_LENGTH,
- F_SKIPBODY,
- F_TRAILING,
- F_TRANSFER_ENCODING
-
- enum llhttp_lenient_flags:
- LENIENT_HEADERS,
- LENIENT_CHUNKED_LENGTH
+ F_CONTENT_LENGTH
enum llhttp_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
- enum llhttp_finish_t:
- HTTP_FINISH_SAFE,
- HTTP_FINISH_SAFE_WITH_CB,
- HTTP_FINISH_UNSAFE
-
enum llhttp_method:
HTTP_DELETE,
HTTP_GET,
@@ -167,24 +142,15 @@ cdef extern from "../vendor/llhttp/build/llhttp.h":
const llhttp_settings_t* settings)
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
- llhttp_errno_t llhttp_finish(llhttp_t* parser)
-
- int llhttp_message_needs_eof(const llhttp_t* parser)
int llhttp_should_keep_alive(const llhttp_t* parser)
- void llhttp_pause(llhttp_t* parser)
- void llhttp_resume(llhttp_t* parser)
-
void llhttp_resume_after_upgrade(llhttp_t* parser)
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
const char* llhttp_get_error_reason(const llhttp_t* parser)
- void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
const char* llhttp_get_error_pos(const llhttp_t* parser)
- const char* llhttp_errno_name(llhttp_errno_t err)
const char* llhttp_method_name(llhttp_method_t method)
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
- void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 344d0776633..e8d38c193bd 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -861,19 +861,6 @@ def test_partial_url(parser) -> None:
assert payload.is_eof()
-def test_url_parse_non_strict_mode(parser) -> None:
- payload = "GET /test/тест HTTP/1.1\r\n\r\n".encode()
- messages, upgrade, tail = parser.feed_data(payload)
- assert len(messages) == 1
-
- msg, payload = messages[0]
-
- assert msg.method == "GET"
- assert msg.path == "/test/тест"
- assert msg.version == (1, 1)
- assert payload.is_eof()
-
-
@pytest.mark.parametrize(
("uri", "path", "query", "fragment"),
[
@@ -898,6 +885,8 @@ def test_parse_uri_percent_encoded(parser, uri, path, query, fragment) -> None:
def test_parse_uri_utf8(parser) -> None:
+ if not isinstance(parser, HttpRequestParserPy):
+ pytest.xfail("Not valid HTTP. Maybe update py-parser to reject later.")
text = ("GET /путь?ключ=знач#фраг HTTP/1.1\r\n\r\n").encode()
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
diff --git a/vendor/README.rst b/vendor/README.rst
index 1f10e20cce2..6156f37f80e 100644
--- a/vendor/README.rst
+++ b/vendor/README.rst
@@ -5,7 +5,7 @@ When building aiohttp from source, there is a pure Python parser used by default
For better performance, you may want to build the higher performance C parser.
To build this ``llhttp`` parser, first get/update the submodules (to update to a
-newer release, add ``--remote`` and check the branch in ``.gitmodules``)::
+newer release, add ``--remote``)::
git submodule update --init --recursive
diff --git a/vendor/llhttp b/vendor/llhttp
index 7e18596bae8..ea67741b1b7 160000
--- a/vendor/llhttp
+++ b/vendor/llhttp
@@ -1 +1 @@
-Subproject commit 7e18596bae8f63692ded9d3250d5d984fe90dcfb
+Subproject commit ea67741b1b70c52d43d8520bf1750e4a7427e827
From a0d234df392bd5cd67d378d31c9531c5ac87c07f Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Mon, 7 Aug 2023 18:36:34 +0100
Subject: [PATCH 27/70] Use lenient headers for response parser (#7490) (#7492)
Co-authored-by: pre-commit-ci[bot]
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
(cherry picked from commit 63965310de606bab522d75e34f64877f69730152)
---
CHANGES/7490.feature | 1 +
aiohttp/_http_parser.pyx | 4 ++++
docs/index.rst | 12 ++++++++++++
setup.cfg | 5 +++++
tests/test_http_parser.py | 17 +++++++++++++++++
5 files changed, 39 insertions(+)
create mode 100644 CHANGES/7490.feature
diff --git a/CHANGES/7490.feature b/CHANGES/7490.feature
new file mode 100644
index 00000000000..7dda94a850f
--- /dev/null
+++ b/CHANGES/7490.feature
@@ -0,0 +1 @@
+Enabled lenient headers for more flexible parsing in the client. -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index 8b7d48245d2..92a70f5685f 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -20,6 +20,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD
from yarl import URL as _URL
from aiohttp import hdrs
+from aiohttp.helpers import DEBUG
from .http_exceptions import (
BadHttpMessage,
@@ -648,6 +649,9 @@ cdef class HttpResponseParser(HttpParser):
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof,
auto_decompress)
+ # Use strict parsing on dev mode, so users are warned about broken servers.
+ if not DEBUG:
+ cparser.llhttp_set_lenient_headers(self._cparser, 1)
cdef object _on_status_complete(self):
if self._buf:
diff --git a/docs/index.rst b/docs/index.rst
index 44c01a7b08c..a171dc1f48b 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -118,6 +118,18 @@ Server example:
For more information please visit :ref:`aiohttp-client` and
:ref:`aiohttp-web` pages.
+Development mode
+================
+
+When writing your code, we recommend enabling Python's
+`development mode `_
+(``python -X dev``). In addition to the extra features enabled for asyncio, aiohttp
+will:
+
+- Use a strict parser in the client code (which can help detect malformed responses
+ from a server).
+- Enable some additional checks (resulting in warnings in certain situations).
+
What's new in aiohttp 3?
========================
diff --git a/setup.cfg b/setup.cfg
index 47ffbc5d209..6d50d321811 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -130,6 +130,9 @@ addopts =
# `pytest-cov`:
--cov=aiohttp
--cov=tests/
+
+ # run tests that are not marked with dev_mode
+ -m "not dev_mode"
filterwarnings =
error
ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning
@@ -153,3 +156,5 @@ minversion = 3.8.2
testpaths = tests/
junit_family=xunit2
xfail_strict = true
+markers =
+ dev_mode: mark test to run in dev mode.
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index e8d38c193bd..d1678296a5b 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -721,6 +721,23 @@ def test_http_response_parser_no_reason(response) -> None:
assert msg.reason == ""
+def test_http_response_parser_lenient_headers(response) -> None:
+ messages, upgrade, tail = response.feed_data(
+ b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n"
+ )
+ msg = messages[0][0]
+
+ assert msg.headers["Foo"] == "abc\x01def"
+
+
+@pytest.mark.dev_mode
+def test_http_response_parser_strict_headers(response) -> None:
+ if isinstance(response, HttpResponseParserPy):
+ pytest.xfail("Py parser is lenient. May update py-parser later.")
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ response.feed_data(b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n")
+
+
def test_http_response_parser_bad(response) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(b"HTT/1\r\n\r\n")
From 8c4ec62f5ba514479ef1c2e74741bc7fa33be3f4 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sat, 12 Aug 2023 16:59:11 +0100
Subject: [PATCH 28/70] [PR #7518/8bd42e74 backport][3.8] Fix GunicornWebWorker
max_requests_jitter not work (#7519)
**This is a backport of PR #7518 as merged into master
(8bd42e742acc825e8b2f170c04216da3c767f56e).**
## What do these changes do?
`GunicornWebWorker` uses `self.cfg.max_requests`, which does not include the
jitter; per
https://github.com/benoitc/gunicorn/blob/master/gunicorn/workers/base.py#L56-L60,
the correct way is to use `self.max_requests`
## Are there changes in behavior for the user?
After the PR is merged, the max-requests-jitter parameter of Gunicorn
can take effect.
## Related issue number
## Checklist
- [x] I think the code is well written
- [x] Unit tests for the changes exist
- [ ] Documentation reflects the changes
- [ ] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
* The format is <Name> <Surname>.
* Please keep alphabetical order, the file is sorted by names.
- [x] Add a new news fragment into the `CHANGES` folder
* name it `.` for example (588.bugfix)
* if you don't have an `issue_id` change it to the pr id after creating
the pr
* ensure type is one of the following:
* `.feature`: Signifying a new feature.
* `.bugfix`: Signifying a bug fix.
* `.doc`: Signifying a documentation improvement.
* `.removal`: Signifying a deprecation or removal of public API.
* `.misc`: A ticket has been closed, but it is not of interest to users.
* Make sure to use full sentences with correct case and punctuation, for
example: "Fix issue with non-ascii contents in doctest text files."
Co-authored-by: phyng
---
CHANGES/7518.bugfix | 1 +
aiohttp/worker.py | 2 +-
tests/test_worker.py | 4 ++--
3 files changed, 4 insertions(+), 3 deletions(-)
create mode 100644 CHANGES/7518.bugfix
diff --git a/CHANGES/7518.bugfix b/CHANGES/7518.bugfix
new file mode 100644
index 00000000000..bc8083ba8ba
--- /dev/null
+++ b/CHANGES/7518.bugfix
@@ -0,0 +1 @@
+Fix GunicornWebWorker max_requests_jitter not working
diff --git a/aiohttp/worker.py b/aiohttp/worker.py
index f1302899f2f..ab6007a005a 100644
--- a/aiohttp/worker.py
+++ b/aiohttp/worker.py
@@ -114,7 +114,7 @@ async def _run(self) -> None:
self.notify()
cnt = server.requests_count
- if self.cfg.max_requests and cnt > self.cfg.max_requests:
+ if self.max_requests and cnt > self.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 2e8b2c45d7e..68c28c88353 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -212,8 +212,8 @@ async def test__run_ok_parent_changed(worker, loop, aiohttp_unused_port) -> None
worker.sockets = [sock]
worker.log = mock.Mock()
worker.loop = loop
+ worker.max_requests = 0
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
- worker.cfg.max_requests = 0
worker.cfg.is_ssl = False
await worker._run()
@@ -233,8 +233,8 @@ async def test__run_exc(worker, loop, aiohttp_unused_port) -> None:
worker.sockets = [sock]
worker.log = mock.Mock()
worker.loop = loop
+ worker.max_requests = 0
worker.cfg.access_log_format = ACCEPTABLE_LOG_FORMAT
- worker.cfg.max_requests = 0
worker.cfg.is_ssl = False
def raiser():
From 5946c7436044bae14617ef06ee7c530ed72622da Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Thu, 7 Sep 2023 18:14:34 +0100
Subject: [PATCH 29/70] CookieJar - return 'best-match' and not LIFO (#7577)
(#7588)
Co-authored-by: marq24
(cherry picked from commit 9c932f71ec5a450954cee92ff9450974414ac1d8)
Co-authored-by: Matthias Marquardt
---
CHANGES/7577.bugfix | 1 +
CONTRIBUTORS.txt | 1 +
aiohttp/cookiejar.py | 3 ++-
tests/test_cookiejar.py | 28 ++++++++++++++++++++++++++++
4 files changed, 32 insertions(+), 1 deletion(-)
create mode 100644 CHANGES/7577.bugfix
diff --git a/CHANGES/7577.bugfix b/CHANGES/7577.bugfix
new file mode 100644
index 00000000000..361497fd780
--- /dev/null
+++ b/CHANGES/7577.bugfix
@@ -0,0 +1 @@
+Fix sorting in filter_cookies to use cookie with longest path -- by :user:`marq24`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 8e31468aee6..c1d93268978 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -211,6 +211,7 @@ Martin Melka
Martin Richard
Mathias Fröjdman
Mathieu Dugré
+Matthias Marquardt
Matthieu Hauglustaine
Matthieu Rigal
Meet Mangukiya
diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py
index 6c88b47e358..e395b7403ae 100644
--- a/aiohttp/cookiejar.py
+++ b/aiohttp/cookiejar.py
@@ -251,7 +251,8 @@ def filter_cookies(
and request_origin not in self._treat_as_secure_origin
)
- for cookie in self:
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
+ for cookie in sorted(self, key=lambda c: len(c["path"])):
name = cookie.key
domain = cookie["domain"]
diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py
index 66f18c31d72..73e12536d6d 100644
--- a/tests/test_cookiejar.py
+++ b/tests/test_cookiejar.py
@@ -686,6 +686,34 @@ async def make_jar():
self.assertEqual(len(jar_filtered), 1)
self.assertEqual(jar_filtered["path-cookie"].value, "one")
+ def test_filter_cookies_order_by_path(self) -> None:
+ async def make_jar():
+ return CookieJar(unsafe=True)
+
+ jar = self.loop.run_until_complete(make_jar())
+ jar.update_cookies(
+ SimpleCookie("path-cookie=one; Domain=pathtest.com; Path=/one; ")
+ )
+ jar.update_cookies(
+ SimpleCookie("path-cookie=zero; Domain=pathtest.com; Path=/; ")
+ )
+ jar.update_cookies(
+ SimpleCookie("path-cookie=two; Domain=pathtest.com; Path=/second; ")
+ )
+ self.assertEqual(len(jar), 3)
+
+ jar_filtered = jar.filter_cookies(URL("http://pathtest.com/"))
+ self.assertEqual(len(jar_filtered), 1)
+ self.assertEqual(jar_filtered["path-cookie"].value, "zero")
+
+ jar_filtered = jar.filter_cookies(URL("http://pathtest.com/second"))
+ self.assertEqual(len(jar_filtered), 1)
+ self.assertEqual(jar_filtered["path-cookie"].value, "two")
+
+ jar_filtered = jar.filter_cookies(URL("http://pathtest.com/one"))
+ self.assertEqual(len(jar_filtered), 1)
+ self.assertEqual(jar_filtered["path-cookie"].value, "one")
+
async def test_dummy_cookie_jar() -> None:
cookie = SimpleCookie("foo=bar; Domain=example.com;")
From b30c0cd2c96e57cc273ffe29c0313487b364f15a Mon Sep 17 00:00:00 2001
From: John Parton
Date: Sat, 9 Sep 2023 06:36:37 -0500
Subject: [PATCH 30/70] Remove chardet/charset-normalizer. (#7589)
Add fallback_charset_resolver ClientSession parameter. (#7561)
Co-authored-by: Sam Bull
(cherry picked from commit 675579699422680607108a7dd68c85ec5284220c)
---------
Co-authored-by: Sam Bull
---
CHANGES/7561.feature | 2 ++
CONTRIBUTORS.txt | 1 +
aiohttp/client.py | 26 +++++++++++++++++
aiohttp/client_reqrep.py | 55 ++++++++++++++++++-----------------
docs/client_advanced.rst | 30 +++++++++++++++++++
docs/client_reference.rst | 51 +++++++++++++++-----------------
docs/index.rst | 8 -----
setup.cfg | 1 +
tests/test_client_response.py | 45 +++++++---------------------
9 files changed, 121 insertions(+), 98 deletions(-)
create mode 100644 CHANGES/7561.feature
diff --git a/CHANGES/7561.feature b/CHANGES/7561.feature
new file mode 100644
index 00000000000..a57914ff2a3
--- /dev/null
+++ b/CHANGES/7561.feature
@@ -0,0 +1,2 @@
+Replace automatic character set detection with a `fallback_charset_resolver` parameter
+in `ClientSession` to allow user-supplied character set detection functions.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index c1d93268978..f8a8df5e347 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -163,6 +163,7 @@ Jesus Cea
Jian Zeng
Jinkyu Yi
Joel Watts
+John Parton
Jon Nabozny
Jonas Krüger Svensson
Jonas Obrist
diff --git a/aiohttp/client.py b/aiohttp/client.py
index 0d0f4c16c0c..4f56f61727b 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -88,6 +88,11 @@
from .tracing import Trace, TraceConfig
from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
+try:
+ import cchardet as chardet
+except ImportError: # pragma: no cover
+ import charset_normalizer as chardet # type: ignore[no-redef]
+
__all__ = (
# client_exceptions
"ClientConnectionError",
@@ -159,6 +164,22 @@ class ClientTimeout:
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
_RetType = TypeVar("_RetType")
+_CharsetResolver = Callable[[ClientResponse, bytes], str]
+
+
+def _default_fallback_charset_resolver(response: ClientResponse, body: bytes) -> str:
+
+ ret: str = chardet.detect(body)["encoding"] or "utf-8"
+
+ if ret != "utf-8":
+ warnings.warn(
+ "Automatic charset detection will be removed in 3.9, see: "
+ "https://docs.aiohttp.org/en/stable/client_advanced.html#character-set-detection", # noqa: E501
+ DeprecationWarning,
+ stacklevel=3,
+ )
+
+ return ret
class ClientSession:
@@ -220,6 +241,9 @@ def __init__(
requote_redirect_url: bool = True,
trace_configs: Optional[List[TraceConfig]] = None,
read_bufsize: int = 2**16,
+ fallback_charset_resolver: _CharsetResolver = (
+ _default_fallback_charset_resolver
+ ),
) -> None:
if loop is None:
if connector is not None:
@@ -313,6 +337,8 @@ def __init__(
for trace_config in self._trace_configs:
trace_config.freeze()
+ self._resolve_charset = fallback_charset_resolver
+
def __init_subclass__(cls: Type["ClientSession"]) -> None:
warnings.warn(
"Inheritance class {} from ClientSession "
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 28b8a28d0d8..987d68f9034 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -1,5 +1,6 @@
import asyncio
import codecs
+import contextlib
import functools
import io
import re
@@ -12,6 +13,7 @@
from typing import (
TYPE_CHECKING,
Any,
+ Callable,
Dict,
Iterable,
List,
@@ -66,11 +68,6 @@
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
-try:
- import cchardet as chardet
-except ImportError: # pragma: no cover
- import charset_normalizer as chardet # type: ignore[no-redef]
-
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
@@ -722,8 +719,8 @@ class ClientResponse(HeadersMixin):
_raw_headers: RawHeaders = None # type: ignore[assignment] # Response raw headers
_connection = None # current connection
- _source_traceback = None
- # setted up by ClientRequest after ClientResponse object creation
+ _source_traceback: Optional[traceback.StackSummary] = None
+ # set up by ClientRequest after ClientResponse object creation
# post-init stage allows to not change ctor signature
_closed = True # to allow __del__ for non-initialized properly response
_released = False
@@ -760,6 +757,15 @@ def __init__(
self._loop = loop
# store a reference to session #1985
self._session: Optional[ClientSession] = session
+ # Save reference to _resolve_charset, so that get_encoding() will still
+ # work after the response has finished reading the body.
+ if session is None:
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
+ self._resolve_charset: Callable[
+ ["ClientResponse", bytes], str
+ ] = lambda *_: "utf-8"
+ else:
+ self._resolve_charset = session._resolve_charset
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
@@ -1053,27 +1059,22 @@ def get_encoding(self) -> str:
encoding = mimetype.parameters.get("charset")
if encoding:
- try:
- codecs.lookup(encoding)
- except LookupError:
- encoding = None
- if not encoding:
- if mimetype.type == "application" and (
- mimetype.subtype == "json" or mimetype.subtype == "rdap"
- ):
- # RFC 7159 states that the default encoding is UTF-8.
- # RFC 7483 defines application/rdap+json
- encoding = "utf-8"
- elif self._body is None:
- raise RuntimeError(
- "Cannot guess the encoding of " "a not yet read body"
- )
- else:
- encoding = chardet.detect(self._body)["encoding"]
- if not encoding:
- encoding = "utf-8"
+ with contextlib.suppress(LookupError):
+ return codecs.lookup(encoding).name
+
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
+ ):
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ return "utf-8"
+
+ if self._body is None:
+ raise RuntimeError(
+ "Cannot compute fallback encoding of a not yet read body"
+ )
- return encoding
+ return self._resolve_charset(self, self._body)
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
"""Read response payload and decode."""
diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst
index 43d7dd251ef..e8f016a96f0 100644
--- a/docs/client_advanced.rst
+++ b/docs/client_advanced.rst
@@ -640,3 +640,33 @@ are changed so that aiohttp itself can wait on the underlying
connection to close. Please follow issue `#1925
`_ for the progress
on this.
+
+
+Character Set Detection
+-----------------------
+
+If you encounter an 'Automatic charset detection will be removed' warning
+when using :meth:`ClientResponse.text()` this may be because the response
+does not include the charset needed to decode the body.
+
+If you know the correct encoding for a request, you can simply specify
+the encoding as a parameter (e.g. ``resp.text("windows-1252")``).
+
+Alternatively, :class:`ClientSession` accepts a ``fallback_charset_resolver`` parameter which
+can be used to reintroduce charset guessing functionality. When a charset is not found
+in the Content-Type header, this function will be called to get the charset encoding. For
+example, this can be used with the ``chardetng_py`` library.::
+
+ from chardetng_py import detect
+
+ def charset_resolver(resp: ClientResponse, body: bytes) -> str:
+ tld = resp.url.host.rsplit(".", maxsplit=1)[-1]
+ return detect(body, allow_utf8=True, tld=tld)
+
+ ClientSession(fallback_charset_resolver=charset_resolver)
+
+Or, if ``chardetng_py`` doesn't work for you, then ``charset-normalizer`` is another option::
+
+ from charset_normalizer import detect
+
+ ClientSession(fallback_charset_resolver=lambda r, b: detect(b)["encoding"] or "utf-8")
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index 8d9abe37eb0..bb2f7e23032 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -51,7 +51,8 @@ The client session supports the context manager protocol for self closing.
read_bufsize=2**16, \
requote_redirect_url=False, \
trust_env=False, \
- trace_configs=None)
+ trace_configs=None, \
+ fallback_charset_resolver=_chardet_resolver)
The class for creating client sessions and making requests.
@@ -200,6 +201,18 @@ The client session supports the context manager protocol for self closing.
disabling. See :ref:`aiohttp-client-tracing-reference` for
more information.
+ :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver:
+ A :term:`callable` that accepts a :class:`ClientResponse` and the
+ :class:`bytes` contents, and returns a :class:`str` which will be used as
+ the encoding parameter to :meth:`bytes.decode()`.
+
+ This function will be called when the charset is not known (e.g. not specified in the
+ Content-Type header). The default function in 3.8.6 calls ``cchardet``
+ or ``charset-normalizer``. In 3.9+ this will be replaced with a function that
+ simply defaults to ``utf-8``.
+
+ .. versionadded:: 3.8.6
+
.. attribute:: closed
``True`` if the session has been closed, ``False`` otherwise.
@@ -1400,12 +1413,8 @@ Response object
Read response's body and return decoded :class:`str` using
specified *encoding* parameter.
- If *encoding* is ``None`` content encoding is autocalculated
- using ``Content-Type`` HTTP header and *charset-normalizer* tool if the
- header is not provided by server.
-
- :term:`cchardet` is used with fallback to :term:`charset-normalizer` if
- *cchardet* is not available.
+ If *encoding* is ``None`` content encoding is determined from the
+ Content-Type header, or using the ``fallback_charset_resolver`` function.
Close underlying connection if data reading gets an error,
release connection otherwise.
@@ -1414,10 +1423,7 @@ Response object
``None`` for encoding autodetection
(default).
- :return str: decoded *BODY*
- :raise LookupError: if the encoding detected by cchardet is
- unknown by Python (e.g. VISCII).
.. note::
@@ -1430,18 +1436,15 @@ Response object
await resp.text('ISO-8859-1')
- .. comethod:: json(*, encoding=None, loads=json.loads, \
+ .. method:: json(*, encoding=None, loads=json.loads, \
content_type='application/json')
+ :async:
Read response's body as *JSON*, return :class:`dict` using
specified *encoding* and *loader*. If data is not still available
- a ``read`` call will be done,
+ a ``read`` call will be done.
- If *encoding* is ``None`` content encoding is autocalculated
- using :term:`cchardet` or :term:`charset-normalizer` as fallback if
- *cchardet* is not available.
-
- if response's `content-type` does not match `content_type` parameter
+ If response's `content-type` does not match `content_type` parameter
:exc:`aiohttp.ContentTypeError` get raised.
To disable content type check pass ``None`` value.
@@ -1473,17 +1476,9 @@ Response object
.. method:: get_encoding()
- Automatically detect content encoding using ``charset`` info in
- ``Content-Type`` HTTP header. If this info is not exists or there
- are no appropriate codecs for encoding then :term:`cchardet` /
- :term:`charset-normalizer` is used.
-
- Beware that it is not always safe to use the result of this function to
- decode a response. Some encodings detected by cchardet are not known by
- Python (e.g. VISCII). *charset-normalizer* is not concerned by that issue.
-
- :raise RuntimeError: if called before the body has been read,
- for :term:`cchardet` usage
+ Retrieve content encoding using ``charset`` info in ``Content-Type`` HTTP header.
+ If no charset is present or the charset is not understood by Python, the
+ ``fallback_charset_resolver`` function associated with the ``ClientSession`` is called.
.. versionadded:: 3.0
diff --git a/docs/index.rst b/docs/index.rst
index a171dc1f48b..94cebd01f7d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -162,14 +162,6 @@ Dependencies
- *charset-normalizer*
- *multidict*
- *yarl*
-- *Optional* :term:`cchardet` as faster replacement for
- :term:`charset-normalizer`.
-
- Install it explicitly via:
-
- .. code-block:: bash
-
- $ pip install cchardet
- *Optional* :term:`aiodns` for fast DNS resolving. The
library is highly recommended.
diff --git a/setup.cfg b/setup.cfg
index 6d50d321811..12cd4124742 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -150,6 +150,7 @@ filterwarnings =
# can be dropped with the next release of `certify`, specifically
# `certify > 2022.06.15`.
ignore:path is deprecated. Use files.. instead. Refer to https.//importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy for migration advice.:DeprecationWarning:certifi.core
+ ignore:Automatic charset detection will be removed in 3.9:DeprecationWarning
junit_suite_name = aiohttp_test_suite
norecursedirs = dist docs build .tox .eggs
minversion = 3.8.2
diff --git a/tests/test_client_response.py b/tests/test_client_response.py
index f8bee42be49..fa472e791ff 100644
--- a/tests/test_client_response.py
+++ b/tests/test_client_response.py
@@ -2,6 +2,7 @@
import gc
import sys
+from typing import Any
from unittest import mock
import pytest
@@ -440,7 +441,11 @@ def side_effect(*args, **kwargs):
assert not response.get_encoding.called
-async def test_text_detect_encoding(loop, session) -> None:
+@pytest.mark.parametrize("content_type", ("text/plain", "text/plain;charset=invalid"))
+async def test_text_charset_resolver(
+ content_type: str, loop: Any, session: Any
+) -> None:
+ session._resolve_charset = lambda r, b: "cp1251"
response = ClientResponse(
"get",
URL("http://def-cl-resp.org"),
@@ -458,7 +463,7 @@ def side_effect(*args, **kwargs):
fut.set_result('{"тест": "пройден"}'.encode("cp1251"))
return fut
- response._headers = {"Content-Type": "text/plain"}
+ response._headers = {"Content-Type": content_type}
content = response.content = mock.Mock()
content.read.side_effect = side_effect
@@ -466,35 +471,7 @@ def side_effect(*args, **kwargs):
res = await response.text()
assert res == '{"тест": "пройден"}'
assert response._connection is None
-
-
-async def test_text_detect_encoding_if_invalid_charset(loop, session) -> None:
- response = ClientResponse(
- "get",
- URL("http://def-cl-resp.org"),
- request_info=mock.Mock(),
- writer=mock.Mock(),
- continue100=None,
- timer=TimerNoop(),
- traces=[],
- loop=loop,
- session=session,
- )
-
- def side_effect(*args, **kwargs):
- fut = loop.create_future()
- fut.set_result('{"тест": "пройден"}'.encode("cp1251"))
- return fut
-
- response._headers = {"Content-Type": "text/plain;charset=invalid"}
- content = response.content = mock.Mock()
- content.read.side_effect = side_effect
-
- await response.read()
- res = await response.text()
- assert res == '{"тест": "пройден"}'
- assert response._connection is None
- assert response.get_encoding().lower() in ("windows-1251", "maccyrillic")
+ assert response.get_encoding() == "cp1251"
async def test_get_encoding_body_none(loop, session) -> None:
@@ -521,7 +498,7 @@ def side_effect(*args, **kwargs):
with pytest.raises(
RuntimeError,
- match="^Cannot guess the encoding of a not yet read body$",
+ match="^Cannot compute fallback encoding of a not yet read body$",
):
response.get_encoding()
assert response.closed
@@ -742,9 +719,7 @@ def test_get_encoding_unknown(loop, session) -> None:
)
response._headers = {"Content-Type": "application/json"}
- with mock.patch("aiohttp.client_reqrep.chardet") as m_chardet:
- m_chardet.detect.return_value = {"encoding": None}
- assert response.get_encoding() == "utf-8"
+ assert response.get_encoding() == "utf-8"
def test_raise_for_status_2xx() -> None:
From bcc416e533796d04fb8124ef1e7686b1f338767a Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Tue, 3 Oct 2023 17:25:46 +0100
Subject: [PATCH 31/70] [PR #7647/1303350e backport][3.8] Upgrade to llhttp
9.1.3 (#7648)
**This is a backport of PR #7647 as merged into master
(1303350e834899ad1c7733effc716a1a46a4b817).**
None
Co-authored-by: Sam Bull
---
aiohttp/_cparser.pxd | 2 ++
aiohttp/_http_parser.pyx | 2 ++
aiohttp/http_parser.py | 2 +-
tests/test_http_parser.py | 2 +-
vendor/llhttp | 2 +-
5 files changed, 7 insertions(+), 3 deletions(-)
diff --git a/aiohttp/_cparser.pxd b/aiohttp/_cparser.pxd
index 9dfb04134d1..c2cd5a92fda 100644
--- a/aiohttp/_cparser.pxd
+++ b/aiohttp/_cparser.pxd
@@ -154,3 +154,5 @@ cdef extern from "../vendor/llhttp/build/llhttp.h":
const char* llhttp_method_name(llhttp_method_t method)
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index 92a70f5685f..2b4b844d05e 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -652,6 +652,8 @@ cdef class HttpResponseParser(HttpParser):
# Use strict parsing on dev mode, so users are warned about broken servers.
if not DEBUG:
cparser.llhttp_set_lenient_headers(self._cparser, 1)
+ cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
+ cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
cdef object _on_status_complete(self):
if self._buf:
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index 5a66ce4b9ee..982570396c6 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -498,7 +498,7 @@ def parse_headers(
if hdrs.CONTENT_LENGTH in headers:
raise BadHttpMessage(
- "Content-Length can't be present with Transfer-Encoding",
+ "Transfer-Encoding can't be present with Content-Length",
)
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index d1678296a5b..6c2067c6ec2 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -340,7 +340,7 @@ def test_request_te_chunked_with_content_length(parser: Any) -> None:
)
with pytest.raises(
http_exceptions.BadHttpMessage,
- match="Content-Length can't be present with Transfer-Encoding",
+ match="Transfer-Encoding can't be present with Content-Length",
):
parser.feed_data(text)
diff --git a/vendor/llhttp b/vendor/llhttp
index ea67741b1b7..9ab2afc85b2 160000
--- a/vendor/llhttp
+++ b/vendor/llhttp
@@ -1 +1 @@
-Subproject commit ea67741b1b70c52d43d8520bf1750e4a7427e827
+Subproject commit 9ab2afc85b2880d96a94d38afaee301c6a314049
From 8a3977acac632d1f02aa7e047da51e27a717d724 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Tue, 3 Oct 2023 16:41:37 +0000
Subject: [PATCH 32/70] [PR #7272/b2a7983a backport][3.8] Fix Read The Docs
config (#7650)
**This is a backport of PR #7272 as merged into master
(b2a7983aed047e36c3418e788096d7f46cea7e1e).**
None
Co-authored-by: Sam Bull
---
.readthedocs.yml | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 90fe80896bc..022dd5c3f53 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -6,14 +6,16 @@
version: 2
submodules:
- include: all # []
+ include: all
exclude: []
recursive: true
build:
- image: latest
+ os: ubuntu-22.04
+ tools:
+ python: "3.11"
+
python:
- version: 3.8
install:
- method: pip
path: .
From d5c12ba890557a575c313bb3017910d7616fce3d Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Fri, 6 Oct 2023 17:11:40 +0100
Subject: [PATCH 33/70] [PR #7661/85713a48 backport][3.8] Update Python parser
for RFCs 9110/9112 (#7662)
**This is a backport of PR #7661 as merged into 3.9
(85713a4894610e848490915e5871ad71199348e2).**
None
Co-authored-by: Sam Bull
---
aiohttp/http_parser.py | 84 +++++++++++++++++++++----------------
tests/test_http_parser.py | 87 ++++++++++++++++++++++++++++++++++++---
2 files changed, 131 insertions(+), 40 deletions(-)
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index 982570396c6..4356670c3ed 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -60,16 +60,16 @@
ASCIISET: Final[Set[str]] = set(string.printable)
-# See https://tools.ietf.org/html/rfc7230#section-3.1.1
-# and https://tools.ietf.org/html/rfc7230#appendix-B
+# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
+# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
-VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
-HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d).(\d)")
+HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\"\\]")
class RawRequestMessage(NamedTuple):
@@ -148,8 +148,11 @@ def parse_headers(
except ValueError:
raise InvalidHeader(line) from None
- bname = bname.strip(b" \t")
- bvalue = bvalue.lstrip()
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
+ if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
+ raise InvalidHeader(line)
+
+ bvalue = bvalue.lstrip(b" \t")
if HDRRE.search(bname):
raise InvalidHeader(bname)
if len(bname) > self.max_field_size:
@@ -170,6 +173,7 @@ def parse_headers(
# consume continuation lines
continuation = line and line[0] in (32, 9) # (' ', '\t')
+ # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
if continuation:
bvalue_lst = [bvalue]
while continuation:
@@ -204,10 +208,14 @@ def parse_headers(
str(header_length),
)
- bvalue = bvalue.strip()
+ bvalue = bvalue.strip(b" \t")
name = bname.decode("utf-8", "surrogateescape")
value = bvalue.decode("utf-8", "surrogateescape")
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
+ if "\n" in value or "\r" in value or "\x00" in value:
+ raise InvalidHeader(bvalue)
+
headers.add(name, value)
raw_headers.append((bname, bvalue))
@@ -322,15 +330,12 @@ def get_content_length() -> Optional[int]:
if length_hdr is None:
return None
- try:
- length = int(length_hdr)
- except ValueError:
+ # Shouldn't allow +/- or other number formats.
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
+ if not length_hdr.strip(" \t").isdigit():
raise InvalidHeader(CONTENT_LENGTH)
- if length < 0:
- raise InvalidHeader(CONTENT_LENGTH)
-
- return length
+ return int(length_hdr)
length = get_content_length()
# do not support old websocket spec
@@ -470,6 +475,24 @@ def parse_headers(
upgrade = False
chunked = False
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
+ singletons = (
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TYPE,
+ hdrs.ETAG,
+ hdrs.HOST,
+ hdrs.MAX_FORWARDS,
+ hdrs.SERVER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.USER_AGENT,
+ )
+ bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
+ if bad_hdr is not None:
+ raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
+
# keep-alive
conn = headers.get(hdrs.CONNECTION)
if conn:
@@ -523,7 +546,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
# request line
line = lines[0].decode("utf-8", "surrogateescape")
try:
- method, path, version = line.split(None, 2)
+ method, path, version = line.split(maxsplit=2)
except ValueError:
raise BadStatusLine(line) from None
@@ -537,14 +560,10 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
raise BadStatusLine(method)
# version
- try:
- if version.startswith("HTTP/"):
- n1, n2 = version[5:].split(".", 1)
- version_o = HttpVersion(int(n1), int(n2))
- else:
- raise BadStatusLine(version)
- except Exception:
- raise BadStatusLine(version)
+ match = VERSRE.match(version)
+ if match is None:
+ raise BadStatusLine(line)
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
if method == "CONNECT":
# authority-form,
@@ -611,12 +630,12 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
try:
- version, status = line.split(None, 1)
+ version, status = line.split(maxsplit=1)
except ValueError:
raise BadStatusLine(line) from None
try:
- status, reason = status.split(None, 1)
+ status, reason = status.split(maxsplit=1)
except ValueError:
reason = ""
@@ -632,13 +651,9 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit number
- try:
- status_i = int(status)
- except ValueError:
- raise BadStatusLine(line) from None
-
- if status_i > 999:
+ if len(status) != 3 or not status.isdigit():
raise BadStatusLine(line)
+ status_i = int(status)
# read headers
(
@@ -773,14 +788,13 @@ def feed_data(
else:
size_b = chunk[:pos]
- try:
- size = int(bytes(size_b), 16)
- except ValueError:
+ if not size_b.isdigit():
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
- raise exc from None
+ raise exc
+ size = int(bytes(size_b), 16)
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 6c2067c6ec2..b002fed43f2 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -465,6 +465,74 @@ def test_invalid_name(parser) -> None:
parser.feed_data(text)
+def test_cve_2023_37276(parser: Any) -> None:
+ text = b"""POST / HTTP/1.1\r\nHost: localhost:8080\r\nX-Abc: \rxTransfer-Encoding: chunked\r\n\r\n"""
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+@pytest.mark.parametrize(
+ "hdr",
+ (
+ "Content-Length: -5", # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length
+ "Content-Length: +256",
+ "Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
+ "Bar: abc\ndef",
+ "Baz: abc\x00def",
+ "Foo : bar", # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
+ "Foo\t: bar",
+ ),
+)
+def test_bad_headers(parser: Any, hdr: str) -> None:
+ text = f"POST / HTTP/1.1\r\n{hdr}\r\n\r\n".encode()
+ with pytest.raises(http_exceptions.InvalidHeader):
+ parser.feed_data(text)
+
+
+def test_bad_chunked_py(loop: Any, protocol: Any) -> None:
+ """Test that invalid chunked encoding doesn't allow content-length to be used."""
+ parser = HttpRequestParserPy(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
+ )
+ messages, upgrade, tail = parser.feed_data(text)
+ assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
+
+
+@pytest.mark.skipif(
+ "HttpRequestParserC" not in dir(aiohttp.http_parser),
+ reason="C based HTTP parser not available",
+)
+def test_bad_chunked_c(loop: Any, protocol: Any) -> None:
+ """C parser behaves differently. Maybe we should align them later."""
+ parser = HttpRequestParserC(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+def test_whitespace_before_header(parser: Any) -> None:
+ text = b"GET / HTTP/1.1\r\n\tContent-Length: 1\r\n\r\nX"
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
@pytest.mark.parametrize("size", [40960, 8191])
def test_max_header_field_size(parser, size) -> None:
name = b"t" * size
@@ -646,6 +714,11 @@ def test_http_request_parser_bad_version(parser) -> None:
parser.feed_data(b"GET //get HT/11\r\n\r\n")
+def test_http_request_parser_bad_version_number(parser: Any) -> None:
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(b"GET /test HTTP/12.3\r\n\r\n")
+
+
@pytest.mark.parametrize("size", [40965, 8191])
def test_http_request_max_status_line(parser, size) -> None:
path = b"t" * (size - 5)
@@ -713,6 +786,11 @@ def test_http_response_parser_bad_version(response) -> None:
response.feed_data(b"HT/11 200 Ok\r\n\r\n")
+def test_http_response_parser_bad_version_number(response) -> None:
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ response.feed_data(b"HTTP/12.3 200 Ok\r\n\r\n")
+
+
def test_http_response_parser_no_reason(response) -> None:
msg = response.feed_data(b"HTTP/1.1 200\r\n\r\n")[0][0][0]
@@ -743,19 +821,18 @@ def test_http_response_parser_bad(response) -> None:
response.feed_data(b"HTT/1\r\n\r\n")
-@pytest.mark.skipif(not NO_EXTENSIONS, reason="Behaviour has changed in C parser")
def test_http_response_parser_code_under_100(response) -> None:
- msg = response.feed_data(b"HTTP/1.1 99 test\r\n\r\n")[0][0][0]
- assert msg.code == 99
+ with pytest.raises(http_exceptions.BadStatusLine):
+ response.feed_data(b"HTTP/1.1 99 test\r\n\r\n")
def test_http_response_parser_code_above_999(response) -> None:
- with pytest.raises(http_exceptions.BadHttpMessage):
+ with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 9999 test\r\n\r\n")
def test_http_response_parser_code_not_int(response) -> None:
- with pytest.raises(http_exceptions.BadHttpMessage):
+ with pytest.raises(http_exceptions.BadStatusLine):
response.feed_data(b"HTTP/1.1 ttt test\r\n\r\n")
From 89b7df157886ff390cdcdc44ecf3c277045838b1 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Fri, 6 Oct 2023 21:52:48 +0100
Subject: [PATCH 34/70] Allow lax response parsing on Py parser (#7663) (#7664)
(cherry picked from commit bd5f92437173aae77cb128a1ebb8bf58effd13b5)
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
CHANGES/7663.feature | 1 +
Makefile | 2 +-
aiohttp/http_parser.py | 76 ++++++++++----
tests/test_http_parser.py | 205 +++++++++++++++++++++++++-------------
4 files changed, 198 insertions(+), 86 deletions(-)
create mode 100644 CHANGES/7663.feature
diff --git a/CHANGES/7663.feature b/CHANGES/7663.feature
new file mode 100644
index 00000000000..509a7ad7e2a
--- /dev/null
+++ b/CHANGES/7663.feature
@@ -0,0 +1 @@
+Updated Python parser to comply with latest HTTP specs and allow lax response parsing -- by :user:`Dreamsorcerer`
diff --git a/Makefile b/Makefile
index 5769d2a1287..98a8812b689 100644
--- a/Makefile
+++ b/Makefile
@@ -58,7 +58,7 @@ aiohttp/_find_header.c: $(call to-hash,aiohttp/hdrs.py ./tools/gen.py)
# _find_headers generator creates _headers.pyi as well
aiohttp/%.c: aiohttp/%.pyx $(call to-hash,$(CYS)) aiohttp/_find_header.c
- cython -3 -o $@ $< -I aiohttp
+ cython -3 -o $@ $< -I aiohttp -Werror
vendor/llhttp/node_modules: vendor/llhttp/package.json
cd vendor/llhttp; npm install
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index 4356670c3ed..91e01f437af 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -3,11 +3,13 @@
import collections
import re
import string
+import sys
import zlib
from contextlib import suppress
from enum import IntEnum
from typing import (
Any,
+ ClassVar,
Generic,
List,
NamedTuple,
@@ -26,7 +28,7 @@
from . import hdrs
from .base_protocol import BaseProtocol
-from .helpers import NO_EXTENSIONS, BaseTimerContext
+from .helpers import DEBUG, NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
BadHttpMessage,
BadStatusLine,
@@ -41,6 +43,11 @@
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import Final, RawHeaders
+if sys.version_info >= (3, 8):
+ from typing import Literal
+else:
+ from typing_extensions import Literal
+
try:
import brotli
@@ -58,6 +65,8 @@
"RawResponseMessage",
)
+_SEP = Literal[b"\r\n", b"\n"]
+
ASCIISET: Final[Set[str]] = set(string.printable)
# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
@@ -70,6 +79,7 @@
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d).(\d)")
HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\"\\]")
+HEXDIGIT = re.compile(rb"[0-9a-fA-F]+")
class RawRequestMessage(NamedTuple):
@@ -173,7 +183,8 @@ def parse_headers(
# consume continuation lines
continuation = line and line[0] in (32, 9) # (' ', '\t')
- # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
+ # Deprecated:
+ # https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
if continuation:
bvalue_lst = [bvalue]
while continuation:
@@ -223,6 +234,8 @@ def parse_headers(
class HttpParser(abc.ABC, Generic[_MsgT]):
+ lax: ClassVar[bool] = False
+
def __init__(
self,
protocol: Optional[BaseProtocol] = None,
@@ -285,7 +298,7 @@ def feed_eof(self) -> Optional[_MsgT]:
def feed_data(
self,
data: bytes,
- SEP: bytes = b"\r\n",
+ SEP: _SEP = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
@@ -309,13 +322,16 @@ def feed_data(
pos = data.find(SEP, start_pos)
# consume \r\n
if pos == start_pos and not self._lines:
- start_pos = pos + 2
+ start_pos = pos + len(SEP)
continue
if pos >= start_pos:
# line found
- self._lines.append(data[start_pos:pos])
- start_pos = pos + 2
+ line = data[start_pos:pos]
+ if SEP == b"\n": # For lax response parsing
+ line = line.rstrip(b"\r")
+ self._lines.append(line)
+ start_pos = pos + len(SEP)
# \r\n\r\n found
if self._lines[-1] == EMPTY:
@@ -332,7 +348,7 @@ def get_content_length() -> Optional[int]:
# Shouldn't allow +/- or other number formats.
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
- if not length_hdr.strip(" \t").isdigit():
+ if not length_hdr.strip(" \t").isdecimal():
raise InvalidHeader(CONTENT_LENGTH)
return int(length_hdr)
@@ -369,6 +385,7 @@ def get_content_length() -> Optional[int]:
readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
+ lax=self.lax,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -387,6 +404,7 @@ def get_content_length() -> Optional[int]:
compression=msg.compression,
readall=True,
auto_decompress=self._auto_decompress,
+ lax=self.lax,
)
else:
if (
@@ -410,6 +428,7 @@ def get_content_length() -> Optional[int]:
readall=True,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
+ lax=self.lax,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -432,7 +451,7 @@ def get_content_length() -> Optional[int]:
assert not self._lines
assert self._payload_parser is not None
try:
- eof, data = self._payload_parser.feed_data(data[start_pos:])
+ eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
except BaseException as exc:
if self.payload_exception is not None:
self._payload_parser.payload.set_exception(
@@ -627,6 +646,20 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
Returns RawResponseMessage.
"""
+ # Lax mode should only be enabled on response parser.
+ lax = not DEBUG
+
+ def feed_data(
+ self,
+ data: bytes,
+ SEP: Optional[_SEP] = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
+ if SEP is None:
+ SEP = b"\r\n" if DEBUG else b"\n"
+ return super().feed_data(data, SEP, *args, **kwargs)
+
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
try:
@@ -651,7 +684,7 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit number
- if len(status) != 3 or not status.isdigit():
+ if len(status) != 3 or not status.isdecimal():
raise BadStatusLine(line)
status_i = int(status)
@@ -693,6 +726,7 @@ def __init__(
readall: bool = False,
response_with_body: bool = True,
auto_decompress: bool = True,
+ lax: bool = False,
) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
@@ -700,6 +734,7 @@ def __init__(
self._chunk_size = 0
self._chunk_tail = b""
self._auto_decompress = auto_decompress
+ self._lax = lax
self.done = False
# payload decompression wrapper
@@ -751,7 +786,7 @@ def feed_eof(self) -> None:
)
def feed_data(
- self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
+ self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
@@ -788,7 +823,10 @@ def feed_data(
else:
size_b = chunk[:pos]
- if not size_b.isdigit():
+ if self._lax: # Allow whitespace in lax mode.
+ size_b = size_b.strip()
+
+ if not re.fullmatch(HEXDIGIT, size_b):
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
@@ -796,9 +834,11 @@ def feed_data(
raise exc
size = int(bytes(size_b), 16)
- chunk = chunk[pos + 2 :]
+ chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ if self._lax and chunk.startswith(b"\r"):
+ chunk = chunk[1:]
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
@@ -820,13 +860,15 @@ def feed_data(
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
+ if self._lax and chunk.startswith(b"\r"):
+ chunk = chunk[1:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
- if chunk[:2] == SEP:
- chunk = chunk[2:]
+ if chunk[: len(SEP)] == SEP:
+ chunk = chunk[len(SEP) :]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
@@ -836,11 +878,11 @@ def feed_data(
# we should get another \r\n otherwise
# trailers needs to be skiped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
- head = chunk[:2]
+ head = chunk[: len(SEP)]
if head == SEP:
# end of stream
self.payload.feed_eof()
- return True, chunk[2:]
+ return True, chunk[len(SEP) :]
# Both CR and LF, or only LF may not be received yet. It is
# expected that CRLF or LF will be shown at the very first
# byte next time, otherwise trailers should come. The last
@@ -858,7 +900,7 @@ def feed_data(
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
- chunk = chunk[pos + 2 :]
+ chunk = chunk[pos + len(SEP) :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index b002fed43f2..b742157d149 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -159,6 +159,87 @@ def test_invalid_linebreak(loop: Any, protocol: Any, request: Any) -> None:
parser.feed_data(text)
+def test_cve_2023_37276(parser) -> None:
+ text = (
+ b"POST / HTTP/1.1\r\nHost: localhost:8080\r\n"
+ b"X-Abc: \rxTransfer-Encoding: chunked\r\n\r\n"
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+@pytest.mark.parametrize(
+ "hdr",
+ (
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length
+ "Content-Length: -5",
+ "Content-Length: +256",
+ "Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
+ "Bar: abc\ndef",
+ "Baz: abc\x00def",
+ "Foo : bar", # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
+ "Foo\t: bar",
+ ),
+)
+def test_bad_headers(parser, hdr: str) -> None:
+ text = f"POST / HTTP/1.1\r\n{hdr}\r\n\r\n".encode()
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+def test_content_length_transfer_encoding(parser) -> None:
+ text = (
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n"
+ + b"Transfer-Encoding: a\r\n\r\napple\r\n"
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+def test_bad_chunked_py(loop, protocol) -> None:
+ """Test that invalid chunked encoding doesn't allow content-length to be used."""
+ parser = HttpRequestParserPy(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
+ )
+ messages, upgrade, tail = parser.feed_data(text)
+ assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
+
+
+@pytest.mark.skipif(
+ "HttpRequestParserC" not in dir(aiohttp.http_parser),
+ reason="C based HTTP parser not available",
+)
+def test_bad_chunked_c(loop, protocol) -> None:
+ """C parser behaves differently. Maybe we should align them later."""
+ parser = HttpRequestParserC(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
+def test_whitespace_before_header(parser) -> None:
+ text = b"GET / HTTP/1.1\r\n\tContent-Length: 1\r\n\r\nX"
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ parser.feed_data(text)
+
+
def test_parse(parser) -> None:
text = b"GET /test HTTP/1.1\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
@@ -465,74 +546,6 @@ def test_invalid_name(parser) -> None:
parser.feed_data(text)
-def test_cve_2023_37276(parser: Any) -> None:
- text = b"""POST / HTTP/1.1\r\nHost: localhost:8080\r\nX-Abc: \rxTransfer-Encoding: chunked\r\n\r\n"""
- with pytest.raises(http_exceptions.BadHttpMessage):
- parser.feed_data(text)
-
-
-@pytest.mark.parametrize(
- "hdr",
- (
- "Content-Length: -5", # https://www.rfc-editor.org/rfc/rfc9110.html#name-content-length
- "Content-Length: +256",
- "Foo: abc\rdef", # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
- "Bar: abc\ndef",
- "Baz: abc\x00def",
- "Foo : bar", # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
- "Foo\t: bar",
- ),
-)
-def test_bad_headers(parser: Any, hdr: str) -> None:
- text = f"POST / HTTP/1.1\r\n{hdr}\r\n\r\n".encode()
- with pytest.raises(http_exceptions.InvalidHeader):
- parser.feed_data(text)
-
-
-def test_bad_chunked_py(loop: Any, protocol: Any) -> None:
- """Test that invalid chunked encoding doesn't allow content-length to be used."""
- parser = HttpRequestParserPy(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- text = (
- b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
- + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
- )
- messages, upgrade, tail = parser.feed_data(text)
- assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
-
-
-@pytest.mark.skipif(
- "HttpRequestParserC" not in dir(aiohttp.http_parser),
- reason="C based HTTP parser not available",
-)
-def test_bad_chunked_c(loop: Any, protocol: Any) -> None:
- """C parser behaves differently. Maybe we should align them later."""
- parser = HttpRequestParserC(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- text = (
- b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
- + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
- )
- with pytest.raises(http_exceptions.BadHttpMessage):
- parser.feed_data(text)
-
-
-def test_whitespace_before_header(parser: Any) -> None:
- text = b"GET / HTTP/1.1\r\n\tContent-Length: 1\r\n\r\nX"
- with pytest.raises(http_exceptions.BadHttpMessage):
- parser.feed_data(text)
-
-
@pytest.mark.parametrize("size", [40960, 8191])
def test_max_header_field_size(parser, size) -> None:
name = b"t" * size
@@ -816,6 +829,62 @@ def test_http_response_parser_strict_headers(response) -> None:
response.feed_data(b"HTTP/1.1 200 test\r\nFoo: abc\x01def\r\n\r\n")
+def test_http_response_parser_bad_crlf(response) -> None:
+ """Still a lot of dodgy servers sending bad requests like this."""
+ messages, upgrade, tail = response.feed_data(
+ b"HTTP/1.0 200 OK\nFoo: abc\nBar: def\n\nBODY\n"
+ )
+ msg = messages[0][0]
+
+ assert msg.headers["Foo"] == "abc"
+ assert msg.headers["Bar"] == "def"
+
+
+async def test_http_response_parser_bad_chunked_lax(response) -> None:
+ text = (
+ b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
+ )
+ messages, upgrade, tail = response.feed_data(text)
+
+ assert await messages[0][1].read(5) == b"abcde"
+
+
+@pytest.mark.dev_mode
+async def test_http_response_parser_bad_chunked_strict_py(loop, protocol) -> None:
+ response = HttpResponseParserPy(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
+ )
+ messages, upgrade, tail = response.feed_data(text)
+ assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
+
+
+@pytest.mark.dev_mode
+@pytest.mark.skipif(
+ "HttpRequestParserC" not in dir(aiohttp.http_parser),
+ reason="C based HTTP parser not available",
+)
+async def test_http_response_parser_bad_chunked_strict_c(loop, protocol) -> None:
+ response = HttpResponseParserC(
+ protocol,
+ loop,
+ 2**16,
+ max_line_size=8190,
+ max_field_size=8190,
+ )
+ text = (
+ b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
+ )
+ with pytest.raises(http_exceptions.BadHttpMessage):
+ response.feed_data(text)
+
+
def test_http_response_parser_bad(response) -> None:
with pytest.raises(http_exceptions.BadHttpMessage):
response.feed_data(b"HTT/1\r\n\r\n")
From 8c128d4f042ca36ebdc55ecdd76099b7722331ba Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Fri, 6 Oct 2023 22:42:08 +0100
Subject: [PATCH 35/70] [PR #7651/45f98b7d backport][3.8] Fix BadStatusLine
message (#7666)
**This is a backport of PR #7651 as merged into master
(45f98b7d5b1aacf21a6cf446a5df51ee2ea2b4b7).**
---
CHANGES/7651.bugfix | 1 +
aiohttp/_http_parser.pyx | 30 +++++++++++-------------------
aiohttp/http_exceptions.py | 4 ++--
tests/test_http_parser.py | 4 +++-
4 files changed, 17 insertions(+), 22 deletions(-)
create mode 100644 CHANGES/7651.bugfix
diff --git a/CHANGES/7651.bugfix b/CHANGES/7651.bugfix
new file mode 100644
index 00000000000..bc013ecf5fa
--- /dev/null
+++ b/CHANGES/7651.bugfix
@@ -0,0 +1 @@
+Fixed display of ``BadStatusLine`` messages from ``llhttp`` -- by :user:`Dreamsorcerer`
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index 2b4b844d05e..3f28fbdab43 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -2,7 +2,6 @@
#
# Based on https://github.com/MagicStack/httptools
#
-from __future__ import absolute_import, print_function
from cpython cimport (
Py_buffer,
@@ -813,7 +812,9 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
- if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
+ err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
+
+ if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
cparser.HPE_CB_HEADERS_COMPLETE,
cparser.HPE_CB_MESSAGE_COMPLETE,
cparser.HPE_CB_CHUNK_HEADER,
@@ -823,22 +824,13 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
cparser.HPE_INVALID_CONTENT_LENGTH,
cparser.HPE_INVALID_CHUNK_SIZE,
cparser.HPE_INVALID_EOF_STATE,
- cparser.HPE_INVALID_TRANSFER_ENCODING):
- cls = BadHttpMessage
-
- elif errno == cparser.HPE_INVALID_STATUS:
- cls = BadStatusLine
-
- elif errno == cparser.HPE_INVALID_METHOD:
- cls = BadStatusLine
-
- elif errno == cparser.HPE_INVALID_VERSION:
- cls = BadStatusLine
-
+ cparser.HPE_INVALID_TRANSFER_ENCODING}:
+ return BadHttpMessage(err_msg)
+ elif errno in {cparser.HPE_INVALID_STATUS,
+ cparser.HPE_INVALID_METHOD,
+ cparser.HPE_INVALID_VERSION}:
+ return BadStatusLine(error=err_msg)
elif errno == cparser.HPE_INVALID_URL:
- cls = InvalidURLError
-
- else:
- cls = BadHttpMessage
+ return InvalidURLError(err_msg)
- return cls("{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer))
+ return BadHttpMessage(err_msg)
diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py
index b5d16ea4ec1..63c20af3d96 100644
--- a/aiohttp/http_exceptions.py
+++ b/aiohttp/http_exceptions.py
@@ -95,10 +95,10 @@ def __init__(self, hdr: Union[bytes, str]) -> None:
class BadStatusLine(BadHttpMessage):
- def __init__(self, line: str = "") -> None:
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
if not isinstance(line, str):
line = repr(line)
- super().__init__(f"Bad status line {line!r}")
+ super().__init__(error or f"Bad status line {line!r}")
self.args = (line,)
self.line = line
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index b742157d149..4b185c9e6b1 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -649,8 +649,10 @@ def test_http_request_parser(parser) -> None:
def test_http_request_bad_status_line(parser) -> None:
text = b"getpath \r\n\r\n"
- with pytest.raises(http_exceptions.BadStatusLine):
+ with pytest.raises(http_exceptions.BadStatusLine) as exc_info:
parser.feed_data(text)
+ # Check for accidentally escaped message.
+ assert r"\n" not in exc_info.value.message
def test_http_request_upgrade(parser) -> None:
From 996de2629ef6b4c2934a7c04dfd49d0950d4c43b Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Sat, 7 Oct 2023 13:40:06 +0100
Subject: [PATCH 36/70] Release v3.8.6 (#7668)
Co-authored-by: Sviatoslav Sydorenko
---
CHANGES.rst | 73 +++++++++++++++++++++++++++++++++++++++++++-
CHANGES/7237.bugfix | 1 -
CHANGES/7468.bugfix | 1 -
CHANGES/7484.misc | 1 -
CHANGES/7490.feature | 1 -
CHANGES/7518.bugfix | 1 -
CHANGES/7561.feature | 2 --
CHANGES/7577.bugfix | 1 -
CHANGES/7651.bugfix | 1 -
CHANGES/7663.feature | 1 -
aiohttp/__init__.py | 2 +-
11 files changed, 73 insertions(+), 12 deletions(-)
delete mode 100644 CHANGES/7237.bugfix
delete mode 100644 CHANGES/7468.bugfix
delete mode 100644 CHANGES/7484.misc
delete mode 100644 CHANGES/7490.feature
delete mode 100644 CHANGES/7518.bugfix
delete mode 100644 CHANGES/7561.feature
delete mode 100644 CHANGES/7577.bugfix
delete mode 100644 CHANGES/7651.bugfix
delete mode 100644 CHANGES/7663.feature
diff --git a/CHANGES.rst b/CHANGES.rst
index d929a0acc0d..fc3740c6299 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,78 @@
.. towncrier release notes start
+3.8.6 (2023-10-07)
+==================
+
+Security bugfixes
+-----------------
+
+- Upgraded the vendored copy of llhttp_ to v9.1.3 -- by :user:`Dreamsorcerer`
+
+ Thanks to :user:`kenballus` for reporting this, see
+ https://github.com/aio-libs/aiohttp/security/advisories/GHSA-pjjw-qhg8-p2p9.
+
+ .. _llhttp: https://llhttp.org
+
+  `#7647 <https://github.com/aio-libs/aiohttp/issues/7647>`_
+
+- Updated Python parser to comply with RFCs 9110/9112 -- by :user:`Dreamsorcerer`
+
+ Thanks to :user:`kenballus` for reporting this, see
+ https://github.com/aio-libs/aiohttp/security/advisories/GHSA-gfw2-4jvh-wgfg.
+
+  `#7663 <https://github.com/aio-libs/aiohttp/issues/7663>`_
+
+
+Deprecation
+-----------
+
+- Added ``fallback_charset_resolver`` parameter in ``ClientSession`` to allow a user-supplied
+ character set detection function.
+
+ Character set detection will no longer be included in 3.9 as a default. If this feature is needed,
+  please use `fallback_charset_resolver <https://docs.aiohttp.org/en/stable/client_advanced.html#character-set-detection>`_.
+
+  `#7561 <https://github.com/aio-libs/aiohttp/issues/7561>`_
+
+
+Features
+--------
+
+- Enabled lenient response parsing for more flexible parsing in the client
+ (this should resolve some regressions when dealing with badly formatted HTTP responses). -- by :user:`Dreamsorcerer`
+
+  `#7490 <https://github.com/aio-libs/aiohttp/issues/7490>`_
+
+
+
+Bugfixes
+--------
+
+- Fixed ``PermissionError`` when ``.netrc`` is unreadable due to permissions.
+
+  `#7237 <https://github.com/aio-libs/aiohttp/issues/7237>`_
+
+- Fixed output of parsing errors pointing to a ``\n``. -- by :user:`Dreamsorcerer`
+
+  `#7468 <https://github.com/aio-libs/aiohttp/issues/7468>`_
+
+- Fixed ``GunicornWebWorker`` max_requests_jitter not working.
+
+  `#7518 <https://github.com/aio-libs/aiohttp/issues/7518>`_
+
+- Fixed sorting in ``filter_cookies`` to use cookie with longest path. -- by :user:`marq24`.
+
+  `#7577 <https://github.com/aio-libs/aiohttp/issues/7577>`_
+
+- Fixed display of ``BadStatusLine`` messages from llhttp_. -- by :user:`Dreamsorcerer`
+
+  `#7651 <https://github.com/aio-libs/aiohttp/issues/7651>`_
+
+
+----
+
+
3.8.5 (2023-07-19)
==================
@@ -45,7 +117,6 @@ Bugfixes
`#3355 `_
-
----
diff --git a/CHANGES/7237.bugfix b/CHANGES/7237.bugfix
deleted file mode 100644
index 26f85ea9c95..00000000000
--- a/CHANGES/7237.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fixed ``PermissionError`` when .netrc is unreadable due to permissions.
diff --git a/CHANGES/7468.bugfix b/CHANGES/7468.bugfix
deleted file mode 100644
index 3f9c256ca0c..00000000000
--- a/CHANGES/7468.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fixed output of parsing errors on `\n`. -- by :user:`Dreamsorcerer`
diff --git a/CHANGES/7484.misc b/CHANGES/7484.misc
deleted file mode 100644
index ff540ceb709..00000000000
--- a/CHANGES/7484.misc
+++ /dev/null
@@ -1 +0,0 @@
-Upgraded llhttp parser to v9 -- by :user:`Dreamsorcerer`
diff --git a/CHANGES/7490.feature b/CHANGES/7490.feature
deleted file mode 100644
index 7dda94a850f..00000000000
--- a/CHANGES/7490.feature
+++ /dev/null
@@ -1 +0,0 @@
-Enabled lenient headers for more flexible parsing in the client. -- by :user:`Dreamsorcerer`
diff --git a/CHANGES/7518.bugfix b/CHANGES/7518.bugfix
deleted file mode 100644
index bc8083ba8ba..00000000000
--- a/CHANGES/7518.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix GunicornWebWorker max_requests_jitter not work
diff --git a/CHANGES/7561.feature b/CHANGES/7561.feature
deleted file mode 100644
index a57914ff2a3..00000000000
--- a/CHANGES/7561.feature
+++ /dev/null
@@ -1,2 +0,0 @@
-Replace automatic character set detection with a `fallback_charset_resolver` parameter
-in `ClientSession` to allow user-supplied character set detection functions.
diff --git a/CHANGES/7577.bugfix b/CHANGES/7577.bugfix
deleted file mode 100644
index 361497fd780..00000000000
--- a/CHANGES/7577.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fix sorting in filter_cookies to use cookie with longest path -- by :user:`marq24`.
diff --git a/CHANGES/7651.bugfix b/CHANGES/7651.bugfix
deleted file mode 100644
index bc013ecf5fa..00000000000
--- a/CHANGES/7651.bugfix
+++ /dev/null
@@ -1 +0,0 @@
-Fixed display of ``BadStatusLine`` messages from ``llhttp`` -- by :user:`Dreamsorcerer`
diff --git a/CHANGES/7663.feature b/CHANGES/7663.feature
deleted file mode 100644
index 509a7ad7e2a..00000000000
--- a/CHANGES/7663.feature
+++ /dev/null
@@ -1 +0,0 @@
-Updated Python parser to comply with latest HTTP specs and allow lax response parsing -- by :user:`Dreamorcerer`
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index 317a47bbad8..8bc7a4aa1bb 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.8.5"
+__version__ = "3.8.6"
from typing import Tuple
From b51610b93b2ae15c4062e3a1680a536ba5f4c5c4 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 26 Oct 2023 11:49:47 +0200
Subject: [PATCH 37/70] [PR #7749/0dc39e46 backport][3.8] Enable testing merge
queues @ GitHub Actions CI/CD (#7750)
Co-authored-by: Sviatoslav Sydorenko
---
.github/workflows/ci-cd.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index 4c0c0e40b83..f6d7e72b6bc 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -1,6 +1,7 @@
name: CI
on:
+ merge_group:
push:
branches:
- 'master'
From 4ad78b3d31bde9fdab22aac2692247f9746e8b48 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sat, 14 Jun 2025 10:48:08 -0500
Subject: [PATCH 38/70] Increment version to 3.12.14.dev0 (#11216)
---
aiohttp/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index 58ef7a9a565..cc73fcc2c8e 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.12.13"
+__version__ = "3.12.14.dev0"
from typing import TYPE_CHECKING, Tuple
From 783946094587b24f567fdefbc4040c73665638b1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 16 Jun 2025 12:08:52 +0000
Subject: [PATCH 39/70] Bump pydantic from 2.11.6 to 2.11.7 (#11219)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.6 to
2.11.7.
Release notes
Sourced from pydantic's
releases.
v2.11.7 2025-06-14
What's Changed
Fixes
Full Changelog: https://github.com/pydantic/pydantic/compare/v2.11.6...v2.11.7
Changelog
Sourced from pydantic's
changelog.
v2.11.7 (2025-06-14)
GitHub
release
What's Changed
Fixes
- Copy
FieldInfo instance if necessary during
FieldInfo build by @Viicos in #11898
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index a731b7c5975..8c9cd742dc8 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -156,7 +156,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-pydantic==2.11.6
+pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 782e58604d0..56391b3c567 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -153,7 +153,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-pydantic==2.11.6
+pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 6031ddeb3f9..ba5bad1016d 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -65,7 +65,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-pydantic==2.11.6
+pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
diff --git a/requirements/test.txt b/requirements/test.txt
index c49dadb2d2d..52cdcaa5187 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -89,7 +89,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-pydantic==2.11.6
+pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
From 8264fce1d94987bc680b7e3b921b425fb6534e16 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 16 Jun 2025 12:14:03 +0000
Subject: [PATCH 40/70] Bump certifi from 2025.4.26 to 2025.6.15 (#11220)
Bumps [certifi](https://github.com/certifi/python-certifi) from
2025.4.26 to 2025.6.15.
Commits
e767d59
2025.06.15 (#357)
3e70765
Bump actions/setup-python from 5.5.0 to 5.6.0
9afd2ff
Bump actions/download-artifact from 4.2.1 to 4.3.0
d7c816c
remove code that's no longer required that 3.7 is our minimum (#351)
1899613
Declare setuptools as the build backend in pyproject.toml (#350)
c874142
update CI for ubuntu 20.04 deprecation (#348)
- See full diff in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/doc-spelling.txt | 2 +-
requirements/doc.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 8c9cd742dc8..4ef96089b1b 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython"
# via -r requirements/runtime-deps.in
build==1.2.2.post1
# via pip-tools
-certifi==2025.4.26
+certifi==2025.6.15
# via requests
cffi==1.17.1
# via
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 56391b3c567..210d4197afe 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -34,7 +34,7 @@ brotli==1.1.0 ; platform_python_implementation == "CPython"
# via -r requirements/runtime-deps.in
build==1.2.2.post1
# via pip-tools
-certifi==2025.4.26
+certifi==2025.6.15
# via requests
cffi==1.17.1
# via
diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt
index d0e3a3c867e..ddf48d4daa0 100644
--- a/requirements/doc-spelling.txt
+++ b/requirements/doc-spelling.txt
@@ -10,7 +10,7 @@ alabaster==1.0.0
# via sphinx
babel==2.17.0
# via sphinx
-certifi==2025.4.26
+certifi==2025.6.15
# via requests
charset-normalizer==3.4.2
# via requests
diff --git a/requirements/doc.txt b/requirements/doc.txt
index d355dd714b6..101dcf49230 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -10,7 +10,7 @@ alabaster==1.0.0
# via sphinx
babel==2.17.0
# via sphinx
-certifi==2025.4.26
+certifi==2025.6.15
# via requests
charset-normalizer==3.4.2
# via requests
From 2ad75686becd36540e440fd8400788feda91a426 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 17 Jun 2025 10:53:23 +0000
Subject: [PATCH 41/70] Bump mypy from 1.16.0 to 1.16.1 (#11222)
Bumps [mypy](https://github.com/python/mypy) from 1.16.0 to 1.16.1.
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 4ef96089b1b..635cc45c5e6 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -118,7 +118,7 @@ multidict==6.4.4
# -r requirements/multidict.in
# -r requirements/runtime-deps.in
# yarl
-mypy==1.16.0 ; implementation_name == "cpython"
+mypy==1.16.1 ; implementation_name == "cpython"
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 210d4197afe..ab90f05da5b 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -115,7 +115,7 @@ multidict==6.4.4
# via
# -r requirements/runtime-deps.in
# yarl
-mypy==1.16.0 ; implementation_name == "cpython"
+mypy==1.16.1 ; implementation_name == "cpython"
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/lint.txt b/requirements/lint.txt
index ba5bad1016d..6dd51053d93 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -45,7 +45,7 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-mypy==1.16.0 ; implementation_name == "cpython"
+mypy==1.16.1 ; implementation_name == "cpython"
# via -r requirements/lint.in
mypy-extensions==1.1.0
# via mypy
diff --git a/requirements/test.txt b/requirements/test.txt
index 52cdcaa5187..e403909fefd 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -63,7 +63,7 @@ multidict==6.4.4
# via
# -r requirements/runtime-deps.in
# yarl
-mypy==1.16.0 ; implementation_name == "cpython"
+mypy==1.16.1 ; implementation_name == "cpython"
# via -r requirements/test.in
mypy-extensions==1.1.0
# via mypy
From ed11b39080c85a75327116fab14ea9305616976c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 17 Jun 2025 15:00:43 +0000
Subject: [PATCH 42/70] Bump multidict from 6.4.4 to 6.5.0 (#11223)
Bumps [multidict](https://github.com/aio-libs/multidict) from 6.4.4 to
6.5.0.
Release notes
Sourced from multidict's
releases.
6.5.0
Features
-
Replace internal implementation from an array of items to hash table.
algorithmic complexity for lookups is switched from O(N) to O(1).
The hash table is very similar to :class:dict from
CPython but it allows keys duplication.
The benchmark shows 25-50% boost for single lookups, x2-x3 for bulk
updates, and x20 for
some multidict view operations. The gain is not for free:
:class:~multidict.MultiDict.add and
:class:~multidict.MultiDict.extend are 25-50%
slower now. We consider it as acceptable because the lookup is much more
common
operation that addition for the library domain.
Related issues and pull requests on GitHub:
#1128.
Contributor-facing changes
-
Builds have been added for arm64 Windows
wheels and the reusable-build-wheel.yml
template has been modified to allow for
an os value (windows-11-arm) which
does not end with the -latest postfix.
Related issues and pull requests on GitHub:
#1167.
Changelog
Sourced from multidict's
changelog.
6.5.0
(2025-06-17)
Features
-
Replace internal implementation from an array of items to hash table.
algorithmic complexity for lookups is switched from O(N) to O(1).
The hash table is very similar to :class:dict from
CPython but it allows keys duplication.
The benchmark shows 25-50% boost for single lookups, x2-x3 for bulk
updates, and x20 for
some multidict view operations. The gain is not for free:
:class:~multidict.MultiDict.add and
:class:~multidict.MultiDict.extend are 25-50%
slower now. We consider it as acceptable because the lookup is much more
common
operation that addition for the library domain.
Related issues and pull requests on GitHub:
:issue:1128.
Contributor-facing changes
-
Builds have been added for arm64 Windows
wheels and the reusable-build-wheel.yml
template has been modified to allow for
an os value (windows-11-arm) which
does not end with the -latest postfix.
Related issues and pull requests on GitHub:
:issue:1167.
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/base.txt | 2 +-
requirements/constraints.txt | 2 +-
requirements/cython.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/multidict.txt | 2 +-
requirements/runtime-deps.txt | 2 +-
requirements/test.txt | 2 +-
7 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index 46fa8b9fc8d..418b8f2276d 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -26,7 +26,7 @@ gunicorn==23.0.0
# via -r requirements/base.in
idna==3.4
# via yarl
-multidict==6.4.4
+multidict==6.5.0
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 635cc45c5e6..6f2c40b6453 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -113,7 +113,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.4.4
+multidict==6.5.0
# via
# -r requirements/multidict.in
# -r requirements/runtime-deps.in
diff --git a/requirements/cython.txt b/requirements/cython.txt
index e84c784cd77..4a54ecda247 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -6,7 +6,7 @@
#
cython==3.1.2
# via -r requirements/cython.in
-multidict==6.4.4
+multidict==6.5.0
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/dev.txt b/requirements/dev.txt
index ab90f05da5b..2a7e8f14b50 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -111,7 +111,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.4.4
+multidict==6.5.0
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index eed6e123d73..f3b98c2a74d 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -4,7 +4,7 @@
#
# pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in
#
-multidict==6.4.4
+multidict==6.5.0
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index d90bf87245c..e251f6f0033 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -24,7 +24,7 @@ frozenlist==1.7.0
# aiosignal
idna==3.4
# via yarl
-multidict==6.4.4
+multidict==6.5.0
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/test.txt b/requirements/test.txt
index e403909fefd..915bb4b9648 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -59,7 +59,7 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.4.4
+multidict==6.5.0
# via
# -r requirements/runtime-deps.in
# yarl
From 5444eb62f884a839b2548c9b77a815961bbe49c3 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 18 Jun 2025 10:57:52 +0000
Subject: [PATCH 43/70] Bump pytest from 8.4.0 to 8.4.1 (#11229)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.4.0 to
8.4.1.
Release notes
Sourced from pytest's
releases.
8.4.1
pytest 8.4.1 (2025-06-17)
Bug fixes
-
#13461:
Corrected _pytest.terminal.TerminalReporter.isatty to
support
being called as a method. Before it was just a boolean which could
break correct code when using -o log_cli=true).
-
#13477:
Reintroduced
pytest.PytestReturnNotNoneWarning{.interpreted-text
role="class"} which was removed by accident in pytest
[8.4]{.title-ref}.
This warning is raised when a test functions returns a value other
than None, which is often a mistake made by beginners.
See return-not-none{.interpreted-text
role="ref"} for more information.
-
#13497:
Fixed compatibility with Twisted 25+.
Improved documentation
- #13492:
Fixed outdated warning about
faulthandler not working on
Windows.
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 6f2c40b6453..d78eaccc01e 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -175,7 +175,7 @@ pyproject-hooks==1.2.0
# via
# build
# pip-tools
-pytest==8.4.0
+pytest==8.4.1
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 2a7e8f14b50..5ebcbaa1be3 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -170,7 +170,7 @@ pyproject-hooks==1.2.0
# via
# build
# pip-tools
-pytest==8.4.0
+pytest==8.4.1
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 6dd51053d93..59e71fefbbc 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -73,7 +73,7 @@ pygments==2.19.1
# via
# pytest
# rich
-pytest==8.4.0
+pytest==8.4.1
# via
# -r requirements/lint.in
# pytest-codspeed
diff --git a/requirements/test.txt b/requirements/test.txt
index 915bb4b9648..d8245a3fb3e 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -97,7 +97,7 @@ pygments==2.19.1
# via
# pytest
# rich
-pytest==8.4.0
+pytest==8.4.1
# via
# -r requirements/test.in
# pytest-codspeed
From 6aea2ce9ac955ddb33e905d89daab918a255627c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 19 Jun 2025 10:40:13 +0000
Subject: [PATCH 44/70] Bump urllib3 from 2.4.0 to 2.5.0 (#11230)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.4.0 to 2.5.0.
Release notes
Sourced from urllib3's
releases.
2.5.0
🚀 urllib3 is fundraising for HTTP/2 support
urllib3
is raising ~$40,000 USD to release HTTP/2 support and ensure
long-term sustainable maintenance of the project after a sharp decline
in financial support. If your company or organization uses Python and
would benefit from HTTP/2 support in Requests, pip, cloud SDKs, and
thousands of other projects please consider contributing
financially to ensure HTTP/2 support is developed sustainably and
maintained for the long-haul.
Thank you for your support.
Security issues
urllib3 2.5.0 fixes two moderate security issues:
- Pool managers now properly control redirects when
retries is passed — CVE-2025-50181 reported by @sandumjacob
(5.3 Medium, GHSA-pq67-6m6q-mj2v)
- Redirects are now controlled by urllib3 in the Node.js runtime —
CVE-2025-50182 (5.3 Medium, GHSA-48p4-8xcf-vxj5)
Features
- Added support for the
compression.zstd module that is
new in Python 3.14. See PEP
784 for more information. (#3610)
- Added support for version 0.5 of
hatch-vcs (#3612)
Bugfixes
- Raised exception for
HTTPResponse.shutdown on a
connection already released to the pool. (#3581)
- Fixed incorrect
CONNECT statement when using an IPv6
proxy with connection_from_host. Previously would not be
wrapped in []. (#3615)
Changelog
Sourced from urllib3's
changelog.
2.5.0 (2025-06-18)
Features
- Added support for the
compression.zstd module that is
new in Python 3.14.
See PEP 784 <https://peps.python.org/pep-0784/>_ for
more information.
([#3610](https://github.com/urllib3/urllib3/issues/3610)
<https://github.com/urllib3/urllib3/issues/3610>__)
- Added support for version 0.5 of
hatch-vcs
([#3612](https://github.com/urllib3/urllib3/issues/3612)
<https://github.com/urllib3/urllib3/issues/3612>__)
Bugfixes
- Fixed a security issue where restricting the maximum number of
followed
redirects at the
urllib3.PoolManager level via the
retries parameter
did not work.
- Made the Node.js runtime respect redirect parameters such as
retries
and redirects.
- Raised exception for
HTTPResponse.shutdown on a
connection already released to the pool.
([#3581](https://github.com/urllib3/urllib3/issues/3581)
<https://github.com/urllib3/urllib3/issues/3581>__)
- Fixed incorrect
CONNECT statement when using an IPv6
proxy with connection_from_host. Previously would not be
wrapped in [].
([#3615](https://github.com/urllib3/urllib3/issues/3615)
<https://github.com/urllib3/urllib3/issues/3615>__)
Commits
aaab4ec
Release 2.5.0
7eb4a2a
Merge commit from fork
f05b132
Merge commit from fork
d03fe32
Fix HTTP tunneling with IPv6 in older Python versions
11661e9
Bump github/codeql-action from 3.28.0 to 3.29.0 (#3624)
6a0ecc6
Update v2 migration guide to 2.4.0 (#3621)
8e32e60
Raise exception for shutdown on a connection already released to the
pool (#3...
9996e0f
Fix emscripten CI for Chrome 137+ (#3599)
4fd1a99
Bump RECENT_DATE (#3617)
c4b5917
Add support for the new compression.zstd module in Python
3.14 (#3611)
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/doc-spelling.txt | 2 +-
requirements/doc.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index d78eaccc01e..46eb547cbe6 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -280,7 +280,7 @@ typing-inspection==0.4.1
# via pydantic
uritemplate==4.2.0
# via gidgethub
-urllib3==2.4.0
+urllib3==2.5.0
# via requests
uvloop==0.21.0 ; platform_system != "Windows"
# via
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 5ebcbaa1be3..0bce2019059 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -271,7 +271,7 @@ typing-inspection==0.4.1
# via pydantic
uritemplate==4.2.0
# via gidgethub
-urllib3==2.4.0
+urllib3==2.5.0
# via requests
uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython"
# via
diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt
index ddf48d4daa0..9fb92b65900 100644
--- a/requirements/doc-spelling.txt
+++ b/requirements/doc-spelling.txt
@@ -69,5 +69,5 @@ towncrier==24.8.0
# via
# -r requirements/doc.in
# sphinxcontrib-towncrier
-urllib3==2.4.0
+urllib3==2.5.0
# via requests
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 101dcf49230..4afaeef00c3 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -62,5 +62,5 @@ towncrier==24.8.0
# via
# -r requirements/doc.in
# sphinxcontrib-towncrier
-urllib3==2.4.0
+urllib3==2.5.0
# via requests
From 6179c76bd74b8d70f41b7f3e99404c1d3999a748 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 23 Jun 2025 12:17:31 +0000
Subject: [PATCH 45/70] Bump pygments from 2.19.1 to 2.19.2 (#11238)
Bumps [pygments](https://github.com/pygments/pygments) from 2.19.1 to
2.19.2.
Release notes
Sourced from pygments's
releases.
2.19.2
- Lua: Fix regression introduced in 2.19.0 (#2882,
#2839)
Changelog
Sourced from pygments's
changelog.
Version 2.19.2
(released June 21st, 2025)
- Lua: Fix regression introduced in 2.19.0 (#2882,
#2839)
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/doc-spelling.txt | 2 +-
requirements/doc.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
6 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 46eb547cbe6..c979fc0fedf 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -162,7 +162,7 @@ pydantic-core==2.33.2
# via pydantic
pyenchant==3.2.2
# via sphinxcontrib-spelling
-pygments==2.19.1
+pygments==2.19.2
# via
# pytest
# rich
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 0bce2019059..8f1d4acc4f8 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -157,7 +157,7 @@ pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
-pygments==2.19.1
+pygments==2.19.2
# via
# pytest
# rich
diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt
index 9fb92b65900..80043477f4c 100644
--- a/requirements/doc-spelling.txt
+++ b/requirements/doc-spelling.txt
@@ -32,7 +32,7 @@ packaging==25.0
# via sphinx
pyenchant==3.2.2
# via sphinxcontrib-spelling
-pygments==2.19.1
+pygments==2.19.2
# via sphinx
requests==2.32.4
# via
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 4afaeef00c3..4ecc88e90d7 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -30,7 +30,7 @@ markupsafe==3.0.2
# via jinja2
packaging==25.0
# via sphinx
-pygments==2.19.1
+pygments==2.19.2
# via sphinx
requests==2.32.4
# via sphinx
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 59e71fefbbc..5762edc6892 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -69,7 +69,7 @@ pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
-pygments==2.19.1
+pygments==2.19.2
# via
# pytest
# rich
diff --git a/requirements/test.txt b/requirements/test.txt
index d8245a3fb3e..37e0fdbfa47 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -93,7 +93,7 @@ pydantic==2.11.7
# via python-on-whales
pydantic-core==2.33.2
# via pydantic
-pygments==2.19.1
+pygments==2.19.2
# via
# pytest
# rich
From 39e2b77c091c340d0271e5c0e53dcd2dd87ba335 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 23 Jun 2025 12:24:06 +0000
Subject: [PATCH 46/70] Bump sigstore/gh-action-sigstore-python from 3.0.0 to
3.0.1 (#11239)
Bumps
[sigstore/gh-action-sigstore-python](https://github.com/sigstore/gh-action-sigstore-python)
from 3.0.0 to 3.0.1.
Release notes
Sourced from sigstore/gh-action-sigstore-python's
releases.
v3.0.1
Changed
- The minimum Python version supported by this action is now 3.9
(#155)
- The action's Python dependencies are now fully pinned to specific
versions
(#165)
Fixed
- The
rfc3161-client dependency has been upgraded to
1.0.3 to resolve
a security vulnerability
(#182)
Changelog
Sourced from sigstore/gh-action-sigstore-python's
changelog.
[3.0.1]
Changed
- The minimum Python version supported by this action is now 3.9
(#155)
- The action's Python dependencies are now fully pinned to specific
versions
(#165)
Fixed
- The
rfc3161-client dependency has been upgraded to
1.0.3 to resolve
a security vulnerability
(#182)
Commits
f7ad0af
chore: prep 3.0.1 (#183)
7dad330
build(deps): bump rfc3161-client from 1.0.2 to 1.0.3 in /requirements
(#182)
deae7b0
build(deps): bump astral-sh/setup-uv in the actions group (#181)
f38fa95
build(deps): bump urllib3 from 2.4.0 to 2.5.0 in /requirements (#180)
6409abb
build(deps): bump the actions group with 2 updates (#178)
d7c8f99
build(deps): bump softprops/action-gh-release in the actions group (#177)
e346064
build(deps): bump requests from 2.32.3 to 2.32.4 in /requirements (#176)
cbd4d80
Update Python dependencies (#174)
250d174
build(deps): bump github/codeql-action in the actions group (#172)
42bbcff
build(deps): bump astral-sh/setup-uv in the actions group (#171)
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/ci-cd.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index c76fbf3df7a..b1a11b3e38b 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -481,7 +481,7 @@ jobs:
uses: pypa/gh-action-pypi-publish@release/v1
- name: Sign the dists with Sigstore
- uses: sigstore/gh-action-sigstore-python@v3.0.0
+ uses: sigstore/gh-action-sigstore-python@v3.0.1
with:
inputs: >-
./dist/*.tar.gz
From b7dadd9ffd4265d23f5ce3f9b25c3026900450d1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 25 Jun 2025 10:49:56 +0000
Subject: [PATCH 47/70] Bump multidict from 6.5.0 to 6.5.1 (#11241)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [multidict](https://github.com/aio-libs/multidict) from 6.5.0 to
6.5.1.
Release notes
Sourced from multidict's
releases.
6.5.1
Bug fixes
-
Fixed a bug in C implementation when multidict is resized and it has
deleted slots.
The bug was introduced by multidict 6.5.0 release.
Patch by :user:asvetlov.
Related issues and pull requests on GitHub:
#1195.
Contributor-facing changes
-
A pair of code formatters for Python and C have been configured in
the pre-commit tool.
Related issues and pull requests on GitHub:
#1123.
-
Shortened fixture parametrization ids.
For example,
test_keys_view_xor[case-insensitive-pure-python-module]
becomes test_keys_view_xor[ci-py] -- by
:user:asvetlov.
Related issues and pull requests on GitHub:
#1192.
-
The :file:reusable-cibuildwheel.yml workflow has been
refactored to
be more generic and :file:ci-cd.yml now holds all the
configuration
toggles -- by :user:webknjaz.
Related issues and pull requests on GitHub:
#1193.
Changelog
Sourced from multidict's
changelog.
6.5.1
(2025-06-24)
Bug fixes
-
Fixed a bug in C implementation when multidict is resized and it has
deleted slots.
The bug was introduced by multidict 6.5.0 release.
Patch by :user:asvetlov.
Related issues and pull requests on GitHub:
:issue:1195.
Contributor-facing changes
-
A pair of code formatters for Python and C have been configured in
the pre-commit tool.
Related issues and pull requests on GitHub:
:issue:1123.
-
Shortened fixture parametrization ids.
For example,
test_keys_view_xor[case-insensitive-pure-python-module]
becomes test_keys_view_xor[ci-py] -- by
:user:asvetlov.
Related issues and pull requests on GitHub:
:issue:1192.
-
The :file:reusable-cibuildwheel.yml workflow has been
refactored to
be more generic and :file:ci-cd.yml now holds all the
configuration
toggles -- by :user:webknjaz.
Related issues and pull requests on GitHub:
:issue:1193.
Commits
df0379f
Release 6.5.1 (#1197)
964d65f
Issue 1195 (#1196)
c90fade
Fix md_get_all() return value (#1194)
eeb048f
Merge branch 'maintenance/generic-gha-cibuildwheel'
0072f7e
📝 Add a change note for PR #1193
7fa1318
🧪 Generalize cibuildwheel config w/ env vars
2f05c7b
🧪 Implement CI UI build matrix groups
661a5e1
Surface timeout-minutes in reusable-cibuildwheel
67a8cb2
Replace reusable-cibuildwheel tag
w/check-name
9ed7eb1
Make hash compute sparse @ reusable-cibuildwheel
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/base.txt | 2 +-
requirements/constraints.txt | 2 +-
requirements/cython.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/multidict.txt | 2 +-
requirements/runtime-deps.txt | 2 +-
requirements/test.txt | 2 +-
7 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index 418b8f2276d..c5e0982246a 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -26,7 +26,7 @@ gunicorn==23.0.0
# via -r requirements/base.in
idna==3.4
# via yarl
-multidict==6.5.0
+multidict==6.5.1
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index c979fc0fedf..db2dda1c51b 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -113,7 +113,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.0
+multidict==6.5.1
# via
# -r requirements/multidict.in
# -r requirements/runtime-deps.in
diff --git a/requirements/cython.txt b/requirements/cython.txt
index 4a54ecda247..a47eb4689ca 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -6,7 +6,7 @@
#
cython==3.1.2
# via -r requirements/cython.in
-multidict==6.5.0
+multidict==6.5.1
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 8f1d4acc4f8..aba179f2ea9 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -111,7 +111,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.0
+multidict==6.5.1
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index f3b98c2a74d..30d464b352d 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -4,7 +4,7 @@
#
# pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in
#
-multidict==6.5.0
+multidict==6.5.1
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index e251f6f0033..dbf99b349fe 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -24,7 +24,7 @@ frozenlist==1.7.0
# aiosignal
idna==3.4
# via yarl
-multidict==6.5.0
+multidict==6.5.1
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/test.txt b/requirements/test.txt
index 37e0fdbfa47..34f767a6dd4 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -59,7 +59,7 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.0
+multidict==6.5.1
# via
# -r requirements/runtime-deps.in
# yarl
From ff09341b49dfc9d022e8db00ed5545d70c081e93 Mon Sep 17 00:00:00 2001
From: KGuillaume-chaps
Date: Thu, 26 Jun 2025 19:54:53 +0200
Subject: [PATCH 48/70]
Patchback/backports/3.13/4872fce3426119e63e1a892c39b474786dafddac/pr 11161
(#11232)
Co-authored-by: kge
---
.pre-commit-config.yaml | 4 +++
CHANGES/11161.feature.rst | 2 ++
CONTRIBUTORS.txt | 1 +
aiohttp/_http_parser.pyx | 2 +-
aiohttp/client_reqrep.py | 12 +++++++--
aiohttp/compression_utils.py | 31 +++++++++++++++++++++++
aiohttp/http_parser.py | 19 +++++++++++---
docs/client_quickstart.rst | 4 +++
docs/spelling_wordlist.txt | 2 ++
requirements/doc.txt | 2 +-
requirements/lint.in | 1 +
requirements/lint.txt | 15 +++--------
requirements/runtime-deps.in | 1 +
requirements/runtime-deps.txt | 10 +++-----
setup.cfg | 1 +
tests/test_client_request.py | 17 ++++++++-----
tests/test_http_parser.py | 47 +++++++++++++++++++++++++++++++----
17 files changed, 135 insertions(+), 36 deletions(-)
create mode 100644 CHANGES/11161.feature.rst
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0edf03d8db7..b5a67394b80 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -55,6 +55,10 @@ repos:
rev: v1.5.0
hooks:
- id: yesqa
+ additional_dependencies:
+ - flake8-docstrings==1.6.0
+ - flake8-no-implicit-concat==0.3.4
+ - flake8-requirements==1.7.8
- repo: https://github.com/PyCQA/isort
rev: '5.13.2'
hooks:
diff --git a/CHANGES/11161.feature.rst b/CHANGES/11161.feature.rst
new file mode 100644
index 00000000000..617c4147a38
--- /dev/null
+++ b/CHANGES/11161.feature.rst
@@ -0,0 +1,2 @@
+Add support for Zstandard (aka Zstd) compression
+-- by :user:`KGuillaume-chaps`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 6b9f7b124bd..6d6565cf99e 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -211,6 +211,7 @@ Justin Foo
Justin Turner Arthur
Kay Zheng
Kevin Samuel
+Kilian Guillaume
Kimmo Parviainen-Jalanko
Kirill Klenov
Kirill Malovitsa
diff --git a/aiohttp/_http_parser.pyx b/aiohttp/_http_parser.pyx
index 16893f00e74..f0724fcf4ca 100644
--- a/aiohttp/_http_parser.pyx
+++ b/aiohttp/_http_parser.pyx
@@ -437,7 +437,7 @@ cdef class HttpParser:
if enc is not None:
self._content_encoding = None
enc = enc.lower()
- if enc in ('gzip', 'deflate', 'br'):
+ if enc in ('gzip', 'deflate', 'br', 'zstd'):
encoding = enc
if self._cparser.type == cparser.HTTP_REQUEST:
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 3209440b53d..4f6ce30a567 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -45,7 +45,7 @@
InvalidURL,
ServerFingerprintMismatch,
)
-from .compression_utils import HAS_BROTLI
+from .compression_utils import HAS_BROTLI, HAS_ZSTD
from .formdata import FormData
from .helpers import (
_SENTINEL,
@@ -104,7 +104,15 @@
def _gen_default_accept_encoding() -> str:
- return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate"
+ encodings = [
+ "gzip",
+ "deflate",
+ ]
+ if HAS_BROTLI:
+ encodings.append("br")
+ if HAS_ZSTD:
+ encodings.append("zstd")
+ return ", ".join(encodings)
@attr.s(auto_attribs=True, frozen=True, slots=True)
diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py
index f08c3d9cdff..cdede4244b4 100644
--- a/aiohttp/compression_utils.py
+++ b/aiohttp/compression_utils.py
@@ -21,6 +21,18 @@
except ImportError: # pragma: no cover
HAS_BROTLI = False
+if sys.version_info >= (3, 14):
+ import compression.zstd # noqa: I900
+
+ HAS_ZSTD = True
+else:
+ try:
+ import zstandard
+
+ HAS_ZSTD = True
+ except ImportError:
+ HAS_ZSTD = False
+
MAX_SYNC_CHUNK_SIZE = 1024
@@ -276,3 +288,22 @@ def flush(self) -> bytes:
if hasattr(self._obj, "flush"):
return cast(bytes, self._obj.flush())
return b""
+
+
+class ZSTDDecompressor:
+ def __init__(self) -> None:
+ if not HAS_ZSTD:
+ raise RuntimeError(
+ "The zstd decompression is not available. "
+ "Please install `zstandard` module"
+ )
+ if sys.version_info >= (3, 14):
+ self._obj = compression.zstd.ZstdDecompressor()
+ else:
+ self._obj = zstandard.ZstdDecompressor()
+
+ def decompress_sync(self, data: bytes) -> bytes:
+ return self._obj.decompress(data)
+
+ def flush(self) -> bytes:
+ return b""
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index db61ab5264c..93b1f376437 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -26,7 +26,13 @@
from . import hdrs
from .base_protocol import BaseProtocol
-from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
+from .compression_utils import (
+ HAS_BROTLI,
+ HAS_ZSTD,
+ BrotliDecompressor,
+ ZLibDecompressor,
+ ZSTDDecompressor,
+)
from .helpers import (
_EXC_SENTINEL,
DEBUG,
@@ -539,7 +545,7 @@ def parse_headers(
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
- if enc in ("gzip", "deflate", "br"):
+ if enc in ("gzip", "deflate", "br", "zstd"):
encoding = enc
# chunking
@@ -957,7 +963,7 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
self.encoding = encoding
self._started_decoding = False
- self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
+ self.decompressor: Union[BrotliDecompressor, ZLibDecompressor, ZSTDDecompressor]
if encoding == "br":
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
@@ -965,6 +971,13 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
"Please install `Brotli`"
)
self.decompressor = BrotliDecompressor()
+ elif encoding == "zstd":
+ if not HAS_ZSTD:
+ raise ContentEncodingError(
+ "Can not decode content-encoding: zstandard (zstd). "
+ "Please install `zstandard`"
+ )
+ self.decompressor = ZSTDDecompressor()
else:
self.decompressor = ZLibDecompressor(encoding=encoding)
diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst
index 0e03f104e90..77ebaab0369 100644
--- a/docs/client_quickstart.rst
+++ b/docs/client_quickstart.rst
@@ -187,6 +187,10 @@ You can enable ``brotli`` transfer-encodings support,
just install `Brotli `_
or `brotlicffi `_.
+You can enable ``zstd`` transfer-encodings support,
+install `zstandard `_.
+If you are using Python >= 3.14, no dependency should be required.
+
JSON Request
============
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 3fd6cdd00fc..399630356d9 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -384,3 +384,5 @@ www
xxx
yarl
zlib
+zstandard
+zstd
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 4ecc88e90d7..664333abe7d 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -18,7 +18,7 @@ click==8.1.8
# via towncrier
docutils==0.21.2
# via sphinx
-idna==3.4
+idna==3.10
# via requests
imagesize==1.4.1
# via sphinx
diff --git a/requirements/lint.in b/requirements/lint.in
index fe996d00176..9663712907d 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -13,3 +13,4 @@ trustme
uvloop; platform_system != "Windows"
valkey
zlib_ng
+zstandard; implementation_name == "cpython"
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 5762edc6892..22eccd8a9c3 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -1,5 +1,5 @@
#
-# This file is autogenerated by pip-compile with python 3.10
+# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --allow-unsafe --output-file=requirements/lint.txt --resolver=backtracking --strip-extras requirements/lint.in
@@ -8,8 +8,6 @@ aiodns==3.5.0
# via -r requirements/lint.in
annotated-types==0.7.0
# via pydantic
-async-timeout==5.0.1
- # via valkey
blockbuster==1.5.24
# via -r requirements/lint.in
cffi==1.17.1
@@ -25,8 +23,6 @@ cryptography==45.0.4
# via trustme
distlib==0.3.9
# via virtualenv
-exceptiongroup==1.3.0
- # via pytest
filelock==3.18.0
# via virtualenv
forbiddenfruit==0.1.4
@@ -94,21 +90,14 @@ six==1.17.0
# via python-dateutil
slotscheck==0.19.1
# via -r requirements/lint.in
-tomli==2.2.1
- # via
- # mypy
- # pytest
- # slotscheck
trustme==1.2.1
# via -r requirements/lint.in
typing-extensions==4.14.0
# via
- # exceptiongroup
# mypy
# pydantic
# pydantic-core
# python-on-whales
- # rich
# typing-inspection
typing-inspection==0.4.1
# via pydantic
@@ -120,3 +109,5 @@ virtualenv==20.31.2
# via pre-commit
zlib-ng==0.5.1
# via -r requirements/lint.in
+zstandard==0.23.0 ; implementation_name == "cpython"
+ # via -r requirements/lint.in
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in
index 7b0382a7a2b..9f254fc7e02 100644
--- a/requirements/runtime-deps.in
+++ b/requirements/runtime-deps.in
@@ -11,3 +11,4 @@ frozenlist >= 1.1.1
multidict >=4.5, < 7.0
propcache >= 0.2.0
yarl >= 1.17.0, < 2.0
+zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index dbf99b349fe..e1c6c4c3814 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -1,5 +1,5 @@
#
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --allow-unsafe --output-file=requirements/runtime-deps.txt --strip-extras requirements/runtime-deps.in
@@ -10,8 +10,6 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiosignal==1.3.2
# via -r requirements/runtime-deps.in
-async-timeout==5.0.1 ; python_version < "3.11"
- # via -r requirements/runtime-deps.in
attrs==25.3.0
# via -r requirements/runtime-deps.in
brotli==1.1.0 ; platform_python_implementation == "CPython"
@@ -22,7 +20,7 @@ frozenlist==1.7.0
# via
# -r requirements/runtime-deps.in
# aiosignal
-idna==3.4
+idna==3.10
# via yarl
multidict==6.5.1
# via
@@ -36,7 +34,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-typing-extensions==4.14.0
- # via multidict
yarl==1.20.1
# via -r requirements/runtime-deps.in
+zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+ # via -r requirements/runtime-deps.in
diff --git a/setup.cfg b/setup.cfg
index 4adfde579a0..c8d17cdc162 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -68,6 +68,7 @@ speedups =
aiodns >= 3.3.0
Brotli; platform_python_implementation == 'CPython'
brotlicffi; platform_python_implementation != 'CPython'
+ zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
[options.packages.find]
exclude =
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index 2af540599f8..950dd93aeb6 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -355,7 +355,7 @@ def test_headers(make_request) -> None:
assert hdrs.CONTENT_TYPE in req.headers
assert req.headers[hdrs.CONTENT_TYPE] == "text/plain"
- assert req.headers[hdrs.ACCEPT_ENCODING] == "gzip, deflate, br"
+ assert "gzip" in req.headers[hdrs.ACCEPT_ENCODING]
def test_headers_list(make_request) -> None:
@@ -1529,15 +1529,20 @@ def test_loose_cookies_types(loop) -> None:
@pytest.mark.parametrize(
- "has_brotli,expected",
+ "has_brotli,has_zstd,expected",
[
- (False, "gzip, deflate"),
- (True, "gzip, deflate, br"),
+ (False, False, "gzip, deflate"),
+ (True, False, "gzip, deflate, br"),
+ (False, True, "gzip, deflate, zstd"),
+ (True, True, "gzip, deflate, br, zstd"),
],
)
-def test_gen_default_accept_encoding(has_brotli, expected) -> None:
+def test_gen_default_accept_encoding(
+ has_brotli: bool, has_zstd: bool, expected: str
+) -> None:
with mock.patch("aiohttp.client_reqrep.HAS_BROTLI", has_brotli):
- assert _gen_default_accept_encoding() == expected
+ with mock.patch("aiohttp.client_reqrep.HAS_ZSTD", has_zstd):
+ assert _gen_default_accept_encoding() == expected
@pytest.mark.parametrize(
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 58fef625f82..a7cdbc311c5 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -2,6 +2,7 @@
import asyncio
import re
+import sys
from contextlib import nullcontext
from typing import Any, Dict, List
from unittest import mock
@@ -32,6 +33,13 @@
except ImportError:
brotli = None
+if sys.version_info >= (3, 14):
+ import compression.zstd as zstandard # noqa: I900
+else:
+ try:
+ import zstandard
+ except ImportError:
+ zstandard = None # type: ignore[assignment]
REQUEST_PARSERS = [HttpRequestParserPy]
RESPONSE_PARSERS = [HttpResponseParserPy]
@@ -585,7 +593,15 @@ def test_compression_brotli(parser) -> None:
assert msg.compression == "br"
-def test_compression_unknown(parser) -> None:
+@pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
+def test_compression_zstd(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\ncontent-encoding: zstd\r\n\r\n"
+ messages, upgrade, tail = parser.feed_data(text)
+ msg = messages[0][0]
+ assert msg.compression == "zstd"
+
+
+def test_compression_unknown(parser: HttpRequestParser) -> None:
text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n"
messages, upgrade, tail = parser.feed_data(text)
msg = messages[0][0]
@@ -1794,10 +1810,19 @@ async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None:
assert b"brotli data" == out._buffer[0]
assert out.is_eof()
+ @pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
+ async def test_http_payload_zstandard(self, protocol: BaseProtocol) -> None:
+ compressed = zstandard.compress(b"zstd data")
+ out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
+ p = HttpPayloadParser(out, length=len(compressed), compression="zstd")
+ p.feed_data(compressed)
+ assert b"zstd data" == out._buffer[0]
+ assert out.is_eof()
+
class TestDeflateBuffer:
- async def test_feed_data(self, stream) -> None:
- buf = aiohttp.StreamReader(stream, 2**16, loop=asyncio.get_event_loop())
+ async def test_feed_data(self, protocol: BaseProtocol) -> None:
+ buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
dbuf = DeflateBuffer(buf, "deflate")
dbuf.decompressor = mock.Mock()
@@ -1807,8 +1832,8 @@ async def test_feed_data(self, stream) -> None:
dbuf.feed_data(b"xxxx", 4)
assert [b"line"] == list(buf._buffer)
- async def test_feed_data_err(self, stream) -> None:
- buf = aiohttp.StreamReader(stream, 2**16, loop=asyncio.get_event_loop())
+ async def test_feed_data_err(self, protocol: BaseProtocol) -> None:
+ buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
dbuf = DeflateBuffer(buf, "deflate")
exc = ValueError()
@@ -1864,6 +1889,18 @@ async def test_feed_eof_no_err_brotli(self, protocol: BaseProtocol) -> None:
dbuf.feed_eof()
assert [b"line"] == list(buf._buffer)
+ @pytest.mark.skipif(zstandard is None, reason="zstandard is not installed")
+ async def test_feed_eof_no_err_zstandard(self, protocol: BaseProtocol) -> None:
+ buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
+ dbuf = DeflateBuffer(buf, "zstd")
+
+ dbuf.decompressor = mock.Mock()
+ dbuf.decompressor.flush.return_value = b"line"
+ dbuf.decompressor.eof = False
+
+ dbuf.feed_eof()
+ assert [b"line"] == list(buf._buffer)
+
async def test_empty_body(self, protocol: BaseProtocol) -> None:
buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
dbuf = DeflateBuffer(buf, "deflate")
From 9d61fc4846843730a25f165e817b953e0f3b0a52 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Sun, 29 Jun 2025 00:45:06 +0100
Subject: [PATCH 49/70] [PR #11243/f01cb5e1 backport][3.13] Ignore empty parts
when parsing Content-Disposition header (#11248)
**This is a backport of PR #11243 as merged into master
(f01cb5e16147a38f20ff16932c2c6bb8ea5c783c).**
Co-authored-by: Pierre-Louis Peeters
---
CHANGES/11243.bugfix | 2 ++
aiohttp/multipart.py | 4 ++++
tests/test_client_response.py | 29 +++++++++++++++++++++++++++++
3 files changed, 35 insertions(+)
create mode 100644 CHANGES/11243.bugfix
diff --git a/CHANGES/11243.bugfix b/CHANGES/11243.bugfix
new file mode 100644
index 00000000000..98ae195bb16
--- /dev/null
+++ b/CHANGES/11243.bugfix
@@ -0,0 +1,2 @@
+Updated `Content-Disposition` header parsing to handle trailing semicolons and empty parts
+-- by :user:`PLPeeters`.
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
index 79f8481ee30..2eb22a595b3 100644
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -114,6 +114,10 @@ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
while parts:
item = parts.pop(0)
+ if not item: # To handle trailing semicolons
+ warnings.warn(BadContentDispositionHeader(header))
+ continue
+
if "=" not in item:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
diff --git a/tests/test_client_response.py b/tests/test_client_response.py
index 2d70feaf06d..a5061e08fe1 100644
--- a/tests/test_client_response.py
+++ b/tests/test_client_response.py
@@ -15,6 +15,7 @@
from aiohttp import ClientSession, hdrs, http
from aiohttp.client_reqrep import ClientResponse, RequestInfo
from aiohttp.helpers import TimerNoop
+from aiohttp.multipart import BadContentDispositionHeader
class WriterMock(mock.AsyncMock):
@@ -965,6 +966,34 @@ def test_content_disposition_no_parameters() -> None:
assert {} == response.content_disposition.parameters
+@pytest.mark.parametrize(
+ "content_disposition",
+ (
+ 'attachment; filename="archive.tar.gz";',
+ 'attachment;; filename="archive.tar.gz"',
+ ),
+)
+def test_content_disposition_empty_parts(content_disposition: str) -> None:
+ response = ClientResponse(
+ "get",
+ URL("http://def-cl-resp.org"),
+ request_info=mock.Mock(),
+ writer=WriterMock(),
+ continue100=None,
+ timer=TimerNoop(),
+ traces=[],
+ loop=mock.Mock(),
+ session=mock.Mock(),
+ )
+ h = {"Content-Disposition": content_disposition}
+ response._headers = CIMultiDictProxy(CIMultiDict(h))
+
+ with pytest.warns(BadContentDispositionHeader):
+ assert response.content_disposition is not None
+ assert "attachment" == response.content_disposition.type
+ assert "archive.tar.gz" == response.content_disposition.filename
+
+
def test_content_disposition_no_header() -> None:
response = ClientResponse(
"get",
From 84a2bf121836b29cdf8b2a9aa3308cb6dcc9fb35 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 30 Jun 2025 13:58:50 +0000
Subject: [PATCH 50/70] Bump multidict from 6.5.1 to 6.6.2 (#11251)
Bumps [multidict](https://github.com/aio-libs/multidict) from 6.5.1 to
6.6.2.
Release notes
Sourced from multidict's
releases.
6.6.2
Bug fixes
-
Fixed a memory corruption issue in the C implementation of
_md_shrink() that could lead to segmentation faults and
data loss when items were deleted from a
:class:~multidict.MultiDict. The issue was an edge case in
the pointer arithmetic during the compaction phase -- by
:user:bdraco.
Related issues and pull requests on GitHub:
#1221,
#1222.
-
Fixed format string compilation errors in debug builds on 32-bit
platforms by using portable %zd format specifiers for
Py_ssize_t values instead of %ld -- by
:user:bdraco.
Related issues and pull requests on GitHub:
#1225,
#1226.
Packaging updates and notes for downstreams
-
Re-enabled 32-bit Linux wheel builds that were disabled by default in
cibuildwheel 3.0.0 -- by :user:bdraco.
Related issues and pull requests on GitHub:
#1225,
#1227.
6.6.1
Bug fixes
-
If :meth:multidict.MultiDict.extend,
:meth:multidict.MultiDict.merge, or
:meth:multidict.MultiDict.update raises an exception, now
the multidict internal state is correctly restored.
Patch by :user:asvetlov.
Related issues and pull requests on GitHub:
#1215.
Contributor-facing changes
-
Fixed setuptools deprecation warning about the license
specification -- by :user:asvetlov.
Related issues and pull requests on GitHub:
#1216.
-
Fix compiler warnings and convert them to errors -- by
:user:asvetlov.
Related issues and pull requests on GitHub:
#1217.
... (truncated)
Changelog
Sourced from multidict's
changelog.
6.6.2
(2025-06-28)
Bug fixes
-
Fixed a memory corruption issue in the C implementation of
_md_shrink() that could lead to segmentation faults and
data loss when items were deleted from a
:class:~multidict.MultiDict. The issue was an edge case in
the pointer arithmetic during the compaction phase -- by
:user:bdraco.
Related issues and pull requests on GitHub:
:issue:1221, :issue:1222.
-
Fixed format string compilation errors in debug builds on 32-bit
platforms by using portable %zd format specifiers for
Py_ssize_t values instead of %ld -- by
:user:bdraco.
Related issues and pull requests on GitHub:
:issue:1225, :issue:1226.
Packaging updates and notes for downstreams
-
Re-enabled 32-bit Linux wheel builds that were disabled by default in
cibuildwheel 3.0.0 -- by :user:bdraco.
Related issues and pull requests on GitHub:
:issue:1225, :issue:1227.
6.6.1
(2025-06-28)
Bug fixes
-
If :meth:multidict.MultiDict.extend,
:meth:multidict.MultiDict.merge, or
:meth:multidict.MultiDict.update raises an exception, now
the multidict internal state is correctly restored.
Patch by :user:asvetlov.
Related issues and pull requests on GitHub:
:issue:1215.
Contributor-facing changes
... (truncated)
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/base.txt | 4 +++-
requirements/constraints.txt | 6 +++++-
requirements/cython.txt | 2 +-
requirements/dev.txt | 6 +++++-
requirements/multidict.txt | 2 +-
requirements/runtime-deps.txt | 6 +++++-
requirements/test.txt | 4 +++-
7 files changed, 23 insertions(+), 7 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index c5e0982246a..8c4a9d08b20 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -26,7 +26,7 @@ gunicorn==23.0.0
# via -r requirements/base.in
idna==3.4
# via yarl
-multidict==6.5.1
+multidict==6.6.2
# via
# -r requirements/runtime-deps.in
# yarl
@@ -46,3 +46,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho
# via -r requirements/base.in
yarl==1.20.1
# via -r requirements/runtime-deps.in
+zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+ # via -r requirements/runtime-deps.in
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index db2dda1c51b..dbc3b14fd4b 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -113,7 +113,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.1
+multidict==6.6.2
# via
# -r requirements/multidict.in
# -r requirements/runtime-deps.in
@@ -300,6 +300,10 @@ zlib-ng==0.5.1
# via
# -r requirements/lint.in
# -r requirements/test.in
+zstandard==0.23.0 ; implementation_name == "cpython"
+ # via
+ # -r requirements/lint.in
+ # -r requirements/runtime-deps.in
# The following packages are considered to be unsafe in a requirements file:
pip==25.1.1
diff --git a/requirements/cython.txt b/requirements/cython.txt
index a47eb4689ca..b74caa3bc61 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -6,7 +6,7 @@
#
cython==3.1.2
# via -r requirements/cython.in
-multidict==6.5.1
+multidict==6.6.2
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/dev.txt b/requirements/dev.txt
index aba179f2ea9..1a2bb685fc0 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -111,7 +111,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.1
+multidict==6.6.2
# via
# -r requirements/runtime-deps.in
# yarl
@@ -291,6 +291,10 @@ zlib-ng==0.5.1
# via
# -r requirements/lint.in
# -r requirements/test.in
+zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+ # via
+ # -r requirements/lint.in
+ # -r requirements/runtime-deps.in
# The following packages are considered to be unsafe in a requirements file:
pip==25.1.1
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index 30d464b352d..5868b9f430f 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -4,7 +4,7 @@
#
# pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in
#
-multidict==6.5.1
+multidict==6.6.2
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index e1c6c4c3814..2df46388d6a 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -10,6 +10,8 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiosignal==1.3.2
# via -r requirements/runtime-deps.in
+async-timeout==5.0.1 ; python_version < "3.11"
+ # via -r requirements/runtime-deps.in
attrs==25.3.0
# via -r requirements/runtime-deps.in
brotli==1.1.0 ; platform_python_implementation == "CPython"
@@ -22,7 +24,7 @@ frozenlist==1.7.0
# aiosignal
idna==3.10
# via yarl
-multidict==6.5.1
+multidict==6.6.2
# via
# -r requirements/runtime-deps.in
# yarl
@@ -34,6 +36,8 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
+typing-extensions==4.14.0
+ # via multidict
yarl==1.20.1
# via -r requirements/runtime-deps.in
zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
diff --git a/requirements/test.txt b/requirements/test.txt
index 34f767a6dd4..9496f193b61 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -59,7 +59,7 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.5.1
+multidict==6.6.2
# via
# -r requirements/runtime-deps.in
# yarl
@@ -153,3 +153,5 @@ yarl==1.20.1
# via -r requirements/runtime-deps.in
zlib-ng==0.5.1
# via -r requirements/test.in
+zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+ # via -r requirements/runtime-deps.in
From 1826cf040708f3223271d32261e7f8c63a0822b3 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 2 Jul 2025 11:26:14 +0000
Subject: [PATCH 51/70] Bump pytest-xdist from 3.7.0 to 3.8.0 (#11257)
Bumps [pytest-xdist](https://github.com/pytest-dev/pytest-xdist) from
3.7.0 to 3.8.0.
Changelog
Sourced from pytest-xdist's
changelog.
pytest-xdist 3.8.0 (2025-06-30)
Features
-
[#1083](https://github.com/pytest-dev/pytest-xdist/issues/1083)
<https://github.com/pytest-dev/pytest-xdist/issues/1083>_:
Add --no-loadscope-reorder and
--loadscope-reorder option to control whether to
automatically reorder tests in loadscope for tests where relative
ordering matters. This only applies when using
loadscope.
For example, [test_file_1, test_file_2, ..., test_file_n] are given
as input test files, if --no-loadscope-reorder is used, for
either worker, the test_file_a will be executed before
test_file_b only if a < b.
The default behavior is to reorder the tests to maximize the number
of tests that can be executed in parallel.
Commits
1e3e4dc
Release 3.8.0
600aad5
Ensure all xdist group names are strings (#1216)
9d7ba5b
Add --no-loadscope-reorder and
--loadscope-reorder options (#1217)
532f07f
Merge pull request #1210
from pytest-dev/pre-commit-ci-update-config
0883ad0
Fix Path usage in test_rsync_roots_no_roots
58a51bc
[pre-commit.ci] pre-commit autoupdate
59a2ad0
Merge pull request #1220
from pytest-dev/dependabot/github_actions/github-act...
d42b9c7
build(deps): bump hynek/build-and-inspect-python-package
ebfcb99
Merge pull request #1206
from pytest-dev/release-3.7.0
23b7fd6
[pre-commit.ci] pre-commit autoupdate (#1207)
- See full diff in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/test.txt | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index dbc3b14fd4b..83fad2ffad9 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -193,7 +193,7 @@ pytest-mock==3.14.1
# via
# -r requirements/lint.in
# -r requirements/test.in
-pytest-xdist==3.7.0
+pytest-xdist==3.8.0
# via -r requirements/test.in
python-dateutil==2.9.0.post0
# via freezegun
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 1a2bb685fc0..e50bad1da50 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -188,7 +188,7 @@ pytest-mock==3.14.1
# via
# -r requirements/lint.in
# -r requirements/test.in
-pytest-xdist==3.7.0
+pytest-xdist==3.8.0
# via -r requirements/test.in
python-dateutil==2.9.0.post0
# via freezegun
diff --git a/requirements/test.txt b/requirements/test.txt
index 9496f193b61..9d835b4960b 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -110,7 +110,7 @@ pytest-cov==6.2.1
# via -r requirements/test.in
pytest-mock==3.14.1
# via -r requirements/test.in
-pytest-xdist==3.7.0
+pytest-xdist==3.8.0
# via -r requirements/test.in
python-dateutil==2.9.0.post0
# via freezegun
From 4d078e0d90f873bf7712a83cc04a24888f3fdd69 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 2 Jul 2025 18:49:13 +0000
Subject: [PATCH 52/70] Bump multidict from 6.6.2 to 6.6.3 (#11254)
Bumps [multidict](https://github.com/aio-libs/multidict) from 6.6.2 to
6.6.3.
Release notes
Sourced from multidict's
releases.
6.6.3
Bug fixes
-
Fixed inconsistencies generated by the C implementation of
_md_shrink() which might later lead to assertion failures
and crash -- by :user:Romain-Geissler-1A.
Related issues and pull requests on GitHub:
#1229.
Changelog
Sourced from multidict's
changelog.
6.6.3
(2025-06-30)
Bug fixes
-
Fixed inconsistencies generated by the C implementation of
_md_shrink() which might later lead to assertion failures
and crash -- by :user:Romain-Geissler-1A.
Related issues and pull requests on GitHub:
:issue:1229.
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/base.txt | 2 +-
requirements/constraints.txt | 2 +-
requirements/cython.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/multidict.txt | 2 +-
requirements/runtime-deps.txt | 2 +-
requirements/test.txt | 2 +-
7 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index 8c4a9d08b20..288575c0f3e 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -26,7 +26,7 @@ gunicorn==23.0.0
# via -r requirements/base.in
idna==3.4
# via yarl
-multidict==6.6.2
+multidict==6.6.3
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 83fad2ffad9..33d1e081fae 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -113,7 +113,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.6.2
+multidict==6.6.3
# via
# -r requirements/multidict.in
# -r requirements/runtime-deps.in
diff --git a/requirements/cython.txt b/requirements/cython.txt
index b74caa3bc61..1194857bdfe 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -6,7 +6,7 @@
#
cython==3.1.2
# via -r requirements/cython.in
-multidict==6.6.2
+multidict==6.6.3
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/dev.txt b/requirements/dev.txt
index e50bad1da50..731c0d8550a 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -111,7 +111,7 @@ markupsafe==3.0.2
# via jinja2
mdurl==0.1.2
# via markdown-it-py
-multidict==6.6.2
+multidict==6.6.3
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index 5868b9f430f..99888e27364 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -4,7 +4,7 @@
#
# pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in
#
-multidict==6.6.2
+multidict==6.6.3
# via -r requirements/multidict.in
typing-extensions==4.14.0
# via multidict
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 2df46388d6a..488e8dac6a0 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -24,7 +24,7 @@ frozenlist==1.7.0
# aiosignal
idna==3.10
# via yarl
-multidict==6.6.2
+multidict==6.6.3
# via
# -r requirements/runtime-deps.in
# yarl
diff --git a/requirements/test.txt b/requirements/test.txt
index 9d835b4960b..afcf8dbba11 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -59,7 +59,7 @@ markdown-it-py==3.0.0
# via rich
mdurl==0.1.2
# via markdown-it-py
-multidict==6.6.2
+multidict==6.6.3
# via
# -r requirements/runtime-deps.in
# yarl
From 7577668140935554ae4450df2913deb67503073a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 3 Jul 2025 11:23:49 +0000
Subject: [PATCH 53/70] Bump coverage from 7.9.1 to 7.9.2 (#11259)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.9.1 to
7.9.2.
Changelog
Sourced from coverage's
changelog.
Version 7.9.2 — 2025-07-03
-
Fix: complex conditionals within a line might cause a KeyError when
using
sys.monitoring, as reported in issue 1991_. This is now
fixed.
-
Fix: we can now measure coverage for code in Python archive (.par)
files.
Thanks, Itamer Oren <pull 1984_>_.
.. _pull 1984: nedbat/coveragepy#1984
.. _issue 1991: nedbat/coveragepy#1991
.. _changes_7-9-1:
Commits
6e77492
docs: oops, beta 3
b24cf7e
docs: sample HTML for 7.9.2
35305c3
docs: prep for 7.9.2
9a8d9b6
docs: add pull request link
88dcaa2
fix: assume a missing line number is intra-line. #1991
678ec80
build: use pyenv for nightly builds. Thanks, Paul Timmins
a3d00d5
build: workflow jobs should have names
279310a
chore: bump the action-dependencies group with 2 updates (#1988)
614dfbf
fix: enable measuring test coverage for Python archive (.par) files (#1984)
42bf82c
chore: bump the action-dependencies group with 2 updates (#1985)
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/test.txt | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 33d1e081fae..7cec3cc5487 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -54,7 +54,7 @@ click==8.1.8
# slotscheck
# towncrier
# wait-for-it
-coverage==7.9.1
+coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 731c0d8550a..2e06514216c 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -54,7 +54,7 @@ click==8.1.8
# slotscheck
# towncrier
# wait-for-it
-coverage==7.9.1
+coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
diff --git a/requirements/test.txt b/requirements/test.txt
index afcf8dbba11..417a02b9bae 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -27,7 +27,7 @@ cffi==1.17.1
# pytest-codspeed
click==8.1.8
# via wait-for-it
-coverage==7.9.1
+coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
From 30c44b39579b46be2387869b52920a5d77fe7ab2 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 3 Jul 2025 11:30:41 +0000
Subject: [PATCH 54/70] Bump cryptography from 45.0.4 to 45.0.5 (#11260)
Bumps [cryptography](https://github.com/pyca/cryptography) from 45.0.4
to 45.0.5.
Changelog
Sourced from cryptography's
changelog.
45.0.5 - 2025-07-02
* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL
3.5.1.
.. _v45-0-4:
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 13 ++++++++++++-
requirements/test.txt | 2 +-
4 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 7cec3cc5487..2d69f0576a9 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -58,7 +58,7 @@ coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.4
+cryptography==45.0.5
# via
# pyjwt
# trustme
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 2e06514216c..65f2a226500 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -58,7 +58,7 @@ coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.4
+cryptography==45.0.5
# via
# pyjwt
# trustme
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 22eccd8a9c3..49bc2d2e62d 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -8,6 +8,8 @@ aiodns==3.5.0
# via -r requirements/lint.in
annotated-types==0.7.0
# via pydantic
+async-timeout==5.0.1
+ # via valkey
blockbuster==1.5.24
# via -r requirements/lint.in
cffi==1.17.1
@@ -19,10 +21,12 @@ cfgv==3.4.0
# via pre-commit
click==8.1.8
# via slotscheck
-cryptography==45.0.4
+cryptography==45.0.5
# via trustme
distlib==0.3.9
# via virtualenv
+exceptiongroup==1.3.0
+ # via pytest
filelock==3.18.0
# via virtualenv
forbiddenfruit==0.1.4
@@ -90,14 +94,21 @@ six==1.17.0
# via python-dateutil
slotscheck==0.19.1
# via -r requirements/lint.in
+tomli==2.2.1
+ # via
+ # mypy
+ # pytest
+ # slotscheck
trustme==1.2.1
# via -r requirements/lint.in
typing-extensions==4.14.0
# via
+ # exceptiongroup
# mypy
# pydantic
# pydantic-core
# python-on-whales
+ # rich
# typing-inspection
typing-inspection==0.4.1
# via pydantic
diff --git a/requirements/test.txt b/requirements/test.txt
index 417a02b9bae..ec2ce608c87 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -31,7 +31,7 @@ coverage==7.9.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.4
+cryptography==45.0.5
# via trustme
exceptiongroup==1.3.0
# via pytest
From c9b85481fb88959be22088366a5b8df772f68c3f Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 3 Jul 2025 21:21:38 +0100
Subject: [PATCH 55/70] [PR #11234/a83597fa backport][3.13] Document exceptions
raised by send_frame et al. (#11263)
**This is a backport of PR #11234 as merged into master
(a83597fa88be7ac7dd5f6081d236d751cb40fe4d).**
Co-authored-by: Jonathan Ehwald
---
CHANGES/11234.doc.rst | 2 ++
docs/web_reference.rst | 24 +++++++++++++++++++-----
2 files changed, 21 insertions(+), 5 deletions(-)
create mode 100644 CHANGES/11234.doc.rst
diff --git a/CHANGES/11234.doc.rst b/CHANGES/11234.doc.rst
new file mode 100644
index 00000000000..900b56a771c
--- /dev/null
+++ b/CHANGES/11234.doc.rst
@@ -0,0 +1,2 @@
+Clarified exceptions raised by ``WebSocketResponse.send_frame`` et al.
+-- by :user:`DoctorJohn`.
diff --git a/docs/web_reference.rst b/docs/web_reference.rst
index bcf20817aab..2d1882da17c 100644
--- a/docs/web_reference.rst
+++ b/docs/web_reference.rst
@@ -1118,7 +1118,9 @@ and :ref:`aiohttp-web-signals` handlers::
:class:`str` (converted to *UTF-8* encoded bytes)
or :class:`bytes`.
- :raise RuntimeError: if connections is not started or closing.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
.. versionchanged:: 3.0
@@ -1133,7 +1135,9 @@ and :ref:`aiohttp-web-signals` handlers::
:class:`str` (converted to *UTF-8* encoded bytes)
or :class:`bytes`.
- :raise RuntimeError: if connections is not started or closing.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
.. versionchanged:: 3.0
@@ -1150,10 +1154,12 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise TypeError: if data is not :class:`str`
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1170,11 +1176,13 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise TypeError: if data is not :class:`bytes`,
:class:`bytearray` or :class:`memoryview`.
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1195,12 +1203,14 @@ and :ref:`aiohttp-web-signals` handlers::
returns a JSON string
(:func:`json.dumps` by default).
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise ValueError: if data is not serializable object
:raise TypeError: if value returned by ``dumps`` param is not :class:`str`
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1230,6 +1240,10 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionadded:: 3.11
.. method:: close(*, code=WSCloseCode.OK, message=b'', drain=True)
From 133e2542d0174691f6956e84b6ccdc7fe2bd03e9 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 3 Jul 2025 21:21:54 +0100
Subject: [PATCH 56/70] [PR #11234/a83597fa backport][3.12] Document exceptions
raised by send_frame et al. (#11262)
**This is a backport of PR #11234 as merged into master
(a83597fa88be7ac7dd5f6081d236d751cb40fe4d).**
Co-authored-by: Jonathan Ehwald
---
CHANGES/11234.doc.rst | 2 ++
docs/web_reference.rst | 24 +++++++++++++++++++-----
2 files changed, 21 insertions(+), 5 deletions(-)
create mode 100644 CHANGES/11234.doc.rst
diff --git a/CHANGES/11234.doc.rst b/CHANGES/11234.doc.rst
new file mode 100644
index 00000000000..900b56a771c
--- /dev/null
+++ b/CHANGES/11234.doc.rst
@@ -0,0 +1,2 @@
+Clarified exceptions raised by ``WebSocketResponse.send_frame`` et al.
+-- by :user:`DoctorJohn`.
diff --git a/docs/web_reference.rst b/docs/web_reference.rst
index bcf20817aab..2d1882da17c 100644
--- a/docs/web_reference.rst
+++ b/docs/web_reference.rst
@@ -1118,7 +1118,9 @@ and :ref:`aiohttp-web-signals` handlers::
:class:`str` (converted to *UTF-8* encoded bytes)
or :class:`bytes`.
- :raise RuntimeError: if connections is not started or closing.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
.. versionchanged:: 3.0
@@ -1133,7 +1135,9 @@ and :ref:`aiohttp-web-signals` handlers::
:class:`str` (converted to *UTF-8* encoded bytes)
or :class:`bytes`.
- :raise RuntimeError: if connections is not started or closing.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
.. versionchanged:: 3.0
@@ -1150,10 +1154,12 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise TypeError: if data is not :class:`str`
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1170,11 +1176,13 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise TypeError: if data is not :class:`bytes`,
:class:`bytearray` or :class:`memoryview`.
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1195,12 +1203,14 @@ and :ref:`aiohttp-web-signals` handlers::
returns a JSON string
(:func:`json.dumps` by default).
- :raise RuntimeError: if connection is not started or closing
+ :raise RuntimeError: if the connection is not started.
:raise ValueError: if data is not serializable object
:raise TypeError: if value returned by ``dumps`` param is not :class:`str`
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionchanged:: 3.0
The method is converted into :term:`coroutine`,
@@ -1230,6 +1240,10 @@ and :ref:`aiohttp-web-signals` handlers::
single message,
``None`` for not overriding per-socket setting.
+ :raise RuntimeError: if the connection is not started.
+
+ :raise aiohttp.ClientConnectionResetError: if the connection is closing.
+
.. versionadded:: 3.11
.. method:: close(*, code=WSCloseCode.OK, message=b'', drain=True)
From 9571860347ef7570549415358f0a29d72f0852c8 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 4 Jul 2025 12:24:13 +0000
Subject: [PATCH 57/70] Bump aiosignal from 1.3.2 to 1.4.0 (#11267)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [aiosignal](https://github.com/aio-libs/aiosignal) from 1.3.2 to
1.4.0.
Release notes
Sourced from aiosignal's
releases.
1.4.0
Features
-
Added decorator functionality to Signal as a convenient
way to add a callback -- by @Vizonex.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>_
-
Improved type safety by allowing callback parameters to be type
checked (typing-extensions is now required for Python <3.13).
Parameters for a Signal callback should now be defined like
Signal[int, str] -- by @Vizonex and @Dreamsorcerer.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>,
[#710](https://github.com/aio-libs/aiosignal/issues/710)
<https://github.com/aio-libs/aiosignal/pulls/710>
Misc
- Removed the sphinxcontrib-asyncio documentation dependency.
[#528](https://github.com/aio-libs/aiosignal/issues/528)
<https://github.com/aio-libs/aiosignal/pull/528>_
Changelog
Sourced from aiosignal's
changelog.
1.4.0 (2025-07-03)
Features
-
Added decorator functionality to Signal as a convenient
way to add a callback -- by @Vizonex.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>_
-
Improved type safety by allowing callback parameters to be type
checked (typing-extensions is now required for Python <3.13).
Parameters for a Signal callback should now be defined like
Signal[int, str] -- by @Vizonex and @Dreamsorcerer.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>,
[#710](https://github.com/aio-libs/aiosignal/issues/710)
<https://github.com/aio-libs/aiosignal/pulls/710>
Misc
- Removed the sphinxcontrib-asyncio documentation dependency.
[#528](https://github.com/aio-libs/aiosignal/issues/528)
<https://github.com/aio-libs/aiosignal/pull/528>_
Commits
1cf8014
Fix deploy
892494c
Release v1.4 (#718)
fa36082
[pre-commit.ci] pre-commit autoupdate (#719)
b7f68f1
[pre-commit.ci] pre-commit autoupdate (#717)
2b1acac
Build(deps): Bump sigstore/gh-action-sigstore-python from 3.0.0 to 3.0.1
(#716)
17456ed
Build(deps): Bump tox from 4.26.0 to 4.27.0 (#715)
4c23690
Build(deps): Bump pytest from 8.4.0 to 8.4.1 (#714)
7be2f68
Build(deps): Bump mypy from 1.16.0 to 1.16.1 (#713)
5d62945
Build(deps): Bump coverage from 7.9.0 to 7.9.1 (#712)
a6d85c1
Build(deps): Bump dependabot/fetch-metadata from 2.3.0 to 2.4.0 (#694)
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
---------
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sam Bull
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
aiohttp/tracing.py | 101 +++++++++++++++-------------------
aiohttp/web_app.py | 4 +-
requirements/base.txt | 6 +-
requirements/constraints.txt | 3 +-
requirements/dev.txt | 3 +-
requirements/runtime-deps.txt | 6 +-
requirements/test.txt | 3 +-
7 files changed, 59 insertions(+), 67 deletions(-)
diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py
index 012ed7bdaf6..568fa7f9e38 100644
--- a/aiohttp/tracing.py
+++ b/aiohttp/tracing.py
@@ -1,5 +1,5 @@
from types import SimpleNamespace
-from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar
+from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar
import attr
from aiosignal import Signal
@@ -12,14 +12,7 @@
from .client import ClientSession
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
-
- class _SignalCallback(Protocol[_ParamT_contra]):
- def __call__(
- self,
- __client_session: ClientSession,
- __trace_config_ctx: SimpleNamespace,
- __params: _ParamT_contra,
- ) -> Awaitable[None]: ...
+ _TracingSignal = Signal[ClientSession, SimpleNamespace, _ParamT_contra]
__all__ = (
@@ -49,54 +42,46 @@ class TraceConfig:
def __init__(
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
- self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = (
+ self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self)
+ self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = (
Signal(self)
)
- self._on_request_chunk_sent: Signal[
- _SignalCallback[TraceRequestChunkSentParams]
- ] = Signal(self)
- self._on_response_chunk_received: Signal[
- _SignalCallback[TraceResponseChunkReceivedParams]
+ self._on_response_chunk_received: _TracingSignal[
+ TraceResponseChunkReceivedParams
] = Signal(self)
- self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
+ self._on_request_end: _TracingSignal[TraceRequestEndParams] = Signal(self)
+ self._on_request_exception: _TracingSignal[TraceRequestExceptionParams] = (
+ Signal(self)
+ )
+ self._on_request_redirect: _TracingSignal[TraceRequestRedirectParams] = Signal(
self
)
- self._on_request_exception: Signal[
- _SignalCallback[TraceRequestExceptionParams]
+ self._on_connection_queued_start: _TracingSignal[
+ TraceConnectionQueuedStartParams
] = Signal(self)
- self._on_request_redirect: Signal[
- _SignalCallback[TraceRequestRedirectParams]
+ self._on_connection_queued_end: _TracingSignal[
+ TraceConnectionQueuedEndParams
] = Signal(self)
- self._on_connection_queued_start: Signal[
- _SignalCallback[TraceConnectionQueuedStartParams]
+ self._on_connection_create_start: _TracingSignal[
+ TraceConnectionCreateStartParams
] = Signal(self)
- self._on_connection_queued_end: Signal[
- _SignalCallback[TraceConnectionQueuedEndParams]
+ self._on_connection_create_end: _TracingSignal[
+ TraceConnectionCreateEndParams
] = Signal(self)
- self._on_connection_create_start: Signal[
- _SignalCallback[TraceConnectionCreateStartParams]
+ self._on_connection_reuseconn: _TracingSignal[
+ TraceConnectionReuseconnParams
] = Signal(self)
- self._on_connection_create_end: Signal[
- _SignalCallback[TraceConnectionCreateEndParams]
+ self._on_dns_resolvehost_start: _TracingSignal[
+ TraceDnsResolveHostStartParams
] = Signal(self)
- self._on_connection_reuseconn: Signal[
- _SignalCallback[TraceConnectionReuseconnParams]
- ] = Signal(self)
- self._on_dns_resolvehost_start: Signal[
- _SignalCallback[TraceDnsResolveHostStartParams]
- ] = Signal(self)
- self._on_dns_resolvehost_end: Signal[
- _SignalCallback[TraceDnsResolveHostEndParams]
- ] = Signal(self)
- self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = (
+ self._on_dns_resolvehost_end: _TracingSignal[TraceDnsResolveHostEndParams] = (
Signal(self)
)
- self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = (
+ self._on_dns_cache_hit: _TracingSignal[TraceDnsCacheHitParams] = Signal(self)
+ self._on_dns_cache_miss: _TracingSignal[TraceDnsCacheMissParams] = Signal(self)
+ self._on_request_headers_sent: _TracingSignal[TraceRequestHeadersSentParams] = (
Signal(self)
)
- self._on_request_headers_sent: Signal[
- _SignalCallback[TraceRequestHeadersSentParams]
- ] = Signal(self)
self._trace_config_ctx_factory = trace_config_ctx_factory
@@ -125,91 +110,91 @@ def freeze(self) -> None:
self._on_request_headers_sent.freeze()
@property
- def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
+ def on_request_start(self) -> "_TracingSignal[TraceRequestStartParams]":
return self._on_request_start
@property
def on_request_chunk_sent(
self,
- ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
+ ) -> "_TracingSignal[TraceRequestChunkSentParams]":
return self._on_request_chunk_sent
@property
def on_response_chunk_received(
self,
- ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
+ ) -> "_TracingSignal[TraceResponseChunkReceivedParams]":
return self._on_response_chunk_received
@property
- def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
+ def on_request_end(self) -> "_TracingSignal[TraceRequestEndParams]":
return self._on_request_end
@property
def on_request_exception(
self,
- ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
+ ) -> "_TracingSignal[TraceRequestExceptionParams]":
return self._on_request_exception
@property
def on_request_redirect(
self,
- ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
+ ) -> "_TracingSignal[TraceRequestRedirectParams]":
return self._on_request_redirect
@property
def on_connection_queued_start(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
+ ) -> "_TracingSignal[TraceConnectionQueuedStartParams]":
return self._on_connection_queued_start
@property
def on_connection_queued_end(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
+ ) -> "_TracingSignal[TraceConnectionQueuedEndParams]":
return self._on_connection_queued_end
@property
def on_connection_create_start(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
+ ) -> "_TracingSignal[TraceConnectionCreateStartParams]":
return self._on_connection_create_start
@property
def on_connection_create_end(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
+ ) -> "_TracingSignal[TraceConnectionCreateEndParams]":
return self._on_connection_create_end
@property
def on_connection_reuseconn(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
+ ) -> "_TracingSignal[TraceConnectionReuseconnParams]":
return self._on_connection_reuseconn
@property
def on_dns_resolvehost_start(
self,
- ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
+ ) -> "_TracingSignal[TraceDnsResolveHostStartParams]":
return self._on_dns_resolvehost_start
@property
def on_dns_resolvehost_end(
self,
- ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
+ ) -> "_TracingSignal[TraceDnsResolveHostEndParams]":
return self._on_dns_resolvehost_end
@property
- def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
+ def on_dns_cache_hit(self) -> "_TracingSignal[TraceDnsCacheHitParams]":
return self._on_dns_cache_hit
@property
- def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
+ def on_dns_cache_miss(self) -> "_TracingSignal[TraceDnsCacheMissParams]":
return self._on_dns_cache_miss
@property
def on_request_headers_sent(
self,
- ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
+ ) -> "_TracingSignal[TraceRequestHeadersSentParams]":
return self._on_request_headers_sent
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py
index 854f9bce88d..619c0085da1 100644
--- a/aiohttp/web_app.py
+++ b/aiohttp/web_app.py
@@ -62,8 +62,8 @@
if TYPE_CHECKING:
- _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
- _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
+ _AppSignal = Signal["Application"]
+ _RespPrepareSignal = Signal[Request, StreamResponse]
_Middlewares = FrozenList[Middleware]
_MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
_Subapps = List["Application"]
diff --git a/requirements/base.txt b/requirements/base.txt
index 288575c0f3e..f310a2cfb22 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -8,7 +8,7 @@ aiodns==3.5.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
async-timeout==5.0.1 ; python_version < "3.11"
# via -r requirements/runtime-deps.in
@@ -41,7 +41,9 @@ pycares==4.9.0
pycparser==2.22
# via cffi
typing-extensions==4.14.0
- # via multidict
+ # via
+ # aiosignal
+ # multidict
uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython"
# via -r requirements/base.in
yarl==1.20.1
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 2d69f0576a9..50a2e139f2e 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiohttp-theme==0.1.7
# via -r requirements/doc.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
alabaster==1.0.0
# via sphinx
@@ -268,6 +268,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# -r requirements/test.in
typing-extensions==4.14.0
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 65f2a226500..e51a3a1c252 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiohttp-theme==0.1.7
# via -r requirements/doc.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
alabaster==1.0.0
# via sphinx
@@ -259,6 +259,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# -r requirements/test.in
typing-extensions==4.14.0
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 488e8dac6a0..59bd3db8f71 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -8,7 +8,7 @@ aiodns==3.5.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
async-timeout==5.0.1 ; python_version < "3.11"
# via -r requirements/runtime-deps.in
@@ -37,7 +37,9 @@ pycares==4.9.0
pycparser==2.22
# via cffi
typing-extensions==4.14.0
- # via multidict
+ # via
+ # aiosignal
+ # multidict
yarl==1.20.1
# via -r requirements/runtime-deps.in
zstandard==0.23.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
diff --git a/requirements/test.txt b/requirements/test.txt
index ec2ce608c87..77bac49cd39 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -8,7 +8,7 @@ aiodns==3.5.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
annotated-types==0.7.0
# via pydantic
@@ -135,6 +135,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# via -r requirements/test.in
typing-extensions==4.14.0
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
From 1f5c5a8fc81942f0367ed13ea3b7dd88d8a7fb24 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Jul 2025 12:48:56 +0000
Subject: [PATCH 58/70] Bump typing-extensions from 4.14.0 to 4.14.1 (#11275)
Bumps [typing-extensions](https://github.com/python/typing_extensions)
from 4.14.0 to 4.14.1.
Release notes
Sourced from typing-extensions's
releases.
4.14.1
Release 4.14.1 (July 4, 2025)
- Fix usage of
typing_extensions.TypedDict nested inside
other types
(e.g., typing.Type[typing_extensions.TypedDict]). This is
not allowed by the
type system but worked on older versions, so we maintain support.
Changelog
Sourced from typing-extensions's
changelog.
Release 4.14.1 (July 4, 2025)
- Fix usage of
typing_extensions.TypedDict nested inside
other types
(e.g., typing.Type[typing_extensions.TypedDict]). This is
not allowed by the
type system but worked on older versions, so we maintain support.
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/base.txt | 2 +-
requirements/constraints.txt | 2 +-
requirements/cython.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/multidict.txt | 2 +-
requirements/runtime-deps.txt | 2 +-
requirements/test.txt | 2 +-
8 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/requirements/base.txt b/requirements/base.txt
index f310a2cfb22..83bb464efde 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -40,7 +40,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# aiosignal
# multidict
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 50a2e139f2e..caadb76ad29 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -266,7 +266,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# via
# -r requirements/lint.in
# -r requirements/test.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# aiosignal
# exceptiongroup
diff --git a/requirements/cython.txt b/requirements/cython.txt
index 1194857bdfe..c76c088578c 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -8,5 +8,5 @@ cython==3.1.2
# via -r requirements/cython.in
multidict==6.6.3
# via -r requirements/multidict.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via multidict
diff --git a/requirements/dev.txt b/requirements/dev.txt
index e51a3a1c252..6e19bd5a880 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -257,7 +257,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# via
# -r requirements/lint.in
# -r requirements/test.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# aiosignal
# exceptiongroup
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 49bc2d2e62d..07d2c51f020 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -101,7 +101,7 @@ tomli==2.2.1
# slotscheck
trustme==1.2.1
# via -r requirements/lint.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# exceptiongroup
# mypy
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index 99888e27364..6f90d5c4c34 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -6,5 +6,5 @@
#
multidict==6.6.3
# via -r requirements/multidict.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via multidict
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 59bd3db8f71..6bf80715c7c 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -36,7 +36,7 @@ pycares==4.9.0
# via aiodns
pycparser==2.22
# via cffi
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# aiosignal
# multidict
diff --git a/requirements/test.txt b/requirements/test.txt
index 77bac49cd39..b18fce2b9fb 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -133,7 +133,7 @@ tomli==2.2.1
# pytest
trustme==1.2.1 ; platform_machine != "i686"
# via -r requirements/test.in
-typing-extensions==4.14.0
+typing-extensions==4.14.1
# via
# aiosignal
# exceptiongroup
From 2f9b8993d1432a7ba7963918692e34905cc3ba28 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Jul 2025 13:23:57 +0000
Subject: [PATCH 59/70] Bump pypa/cibuildwheel from 3.0.0 to 3.0.1 (#11278)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from
3.0.0 to 3.0.1.
Release notes
Sourced from pypa/cibuildwheel's
releases.
v3.0.1
- 🛠 Updates CPython 3.14 prerelease to 3.14.0b3 (#2471)
- ✨ Adds a CPython 3.14 prerelease iOS build (only when prerelease
builds are enabled)
(#2475)
Changelog
Sourced from pypa/cibuildwheel's
changelog.
v3.0.1
5 July 2025
- 🛠 Updates CPython 3.14 prerelease to 3.14.0b3 (#2471)
- ✨ Adds a CPython 3.14 prerelease iOS build (only when prerelease
builds are enabled)
(#2475)
Commits
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/ci-cd.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml
index b1a11b3e38b..50f55e7d26e 100644
--- a/.github/workflows/ci-cd.yml
+++ b/.github/workflows/ci-cd.yml
@@ -418,7 +418,7 @@ jobs:
run: |
make cythonize
- name: Build wheels
- uses: pypa/cibuildwheel@v3.0.0
+ uses: pypa/cibuildwheel@v3.0.1
env:
CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }}
CIBW_ARCHS_MACOS: x86_64 arm64 universal2
From ce3c0a718c6bcec48fbbf3c656cc954b001d4cd4 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Mon, 7 Jul 2025 20:16:28 +0100
Subject: [PATCH 60/70] Bump aiosignal from 1.3.2 to 1.4.0 (#11267) (#11279)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [aiosignal](https://github.com/aio-libs/aiosignal) from 1.3.2 to
1.4.0.
Release notes
Sourced from aiosignal's
releases.
1.4.0
Features
-
Added decorator functionality to Signal as a convenient
way to add a callback -- by @Vizonex.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>_
-
Improved type safety by allowing callback parameters to be type
checked (typing-extensions is now required for Python <3.13).
Parameters for a Signal callback should now be defined like
Signal[int, str] -- by @Vizonex and @Dreamsorcerer.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>,
[#710](https://github.com/aio-libs/aiosignal/issues/710)
<https://github.com/aio-libs/aiosignal/pulls/710>
Misc
- Removed the sphinxcontrib-asyncio documentation dependency.
[#528](https://github.com/aio-libs/aiosignal/issues/528)
<https://github.com/aio-libs/aiosignal/pull/528>_
Changelog
Sourced from aiosignal's
changelog.
1.4.0 (2025-07-03)
Features
-
Added decorator functionality to Signal as a convenient
way to add a callback -- by @Vizonex.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>_
-
Improved type safety by allowing callback parameters to be type
checked (typing-extensions is now required for Python <3.13).
Parameters for a Signal callback should now be defined like
Signal[int, str] -- by @Vizonex and @Dreamsorcerer.
[#699](https://github.com/aio-libs/aiosignal/issues/699)
<https://github.com/aio-libs/aiosignal/pulls/699>,
[#710](https://github.com/aio-libs/aiosignal/issues/710)
<https://github.com/aio-libs/aiosignal/pulls/710>
Misc
- Removed the sphinxcontrib-asyncio documentation dependency.
[#528](https://github.com/aio-libs/aiosignal/issues/528)
<https://github.com/aio-libs/aiosignal/pull/528>_
Commits
1cf8014
Fix deploy
892494c
Release v1.4 (#718)
fa36082
[pre-commit.ci] pre-commit autoupdate (#719)
b7f68f1
[pre-commit.ci] pre-commit autoupdate (#717)
2b1acac
Build(deps): Bump sigstore/gh-action-sigstore-python from 3.0.0 to 3.0.1
(#716)
17456ed
Build(deps): Bump tox from 4.26.0 to 4.27.0 (#715)
4c23690
Build(deps): Bump pytest from 8.4.0 to 8.4.1 (#714)
7be2f68
Build(deps): Bump mypy from 1.16.0 to 1.16.1 (#713)
5d62945
Build(deps): Bump coverage from 7.9.0 to 7.9.1 (#712)
a6d85c1
Build(deps): Bump dependabot/fetch-metadata from 2.3.0 to 2.4.0 (#694)
- Additional commits viewable in compare
view
[](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
---------
(cherry picked from commit 9571860347ef7570549415358f0a29d72f0852c8)
## What do these changes do?
## Are there changes in behavior for the user?
## Is it a substantial burden for the maintainers to support this?
## Related issue number
## Checklist
- [ ] I think the code is well written
- [ ] Unit tests for the changes exist
- [ ] Documentation reflects the changes
- [ ] If you provide code modification, please add yourself to
`CONTRIBUTORS.txt`
* The format is <Name> <Surname>.
* Please keep alphabetical order, the file is sorted by names.
- [ ] Add a new news fragment into the `CHANGES/` folder
* name it `<issue_id>.<type>.rst` (e.g. `588.bugfix.rst`)
* if you don't have an issue number, change it to the pull request
number after creating the PR
* `.bugfix`: A bug fix for something the maintainers deemed an
improper undesired behavior that got corrected to match
pre-agreed expectations.
* `.feature`: A new behavior, public APIs. That sort of stuff.
* `.deprecation`: A declaration of future API removals and breaking
changes in behavior.
* `.breaking`: When something public is removed in a breaking way.
Could be deprecated in an earlier release.
* `.doc`: Notable updates to the documentation structure or build
process.
* `.packaging`: Notes for downstreams about unobvious side effects
and tooling. Changes in the test invocation considerations and
runtime assumptions.
* `.contrib`: Stuff that affects the contributor experience. e.g.
Running tests, building the docs, setting up the development
environment.
* `.misc`: Changes that are hard to assign to any of the above
categories.
* Make sure to use full sentences with correct case and punctuation,
for example:
```rst
Fixed issue with non-ascii contents in doctest text files
-- by :user:`contributor-gh-handle`.
```
Use the past tense or the present tense in a non-imperative mood,
referring to what's changed compared to the last released version
of this project.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
aiohttp/tracing.py | 101 +++++++++++++++-------------------
aiohttp/web_app.py | 4 +-
requirements/base.txt | 8 ++-
requirements/constraints.txt | 3 +-
requirements/dev.txt | 3 +-
requirements/runtime-deps.txt | 9 +--
requirements/test.txt | 3 +-
7 files changed, 61 insertions(+), 70 deletions(-)
diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py
index 012ed7bdaf6..568fa7f9e38 100644
--- a/aiohttp/tracing.py
+++ b/aiohttp/tracing.py
@@ -1,5 +1,5 @@
from types import SimpleNamespace
-from typing import TYPE_CHECKING, Awaitable, Mapping, Optional, Protocol, Type, TypeVar
+from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar
import attr
from aiosignal import Signal
@@ -12,14 +12,7 @@
from .client import ClientSession
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
-
- class _SignalCallback(Protocol[_ParamT_contra]):
- def __call__(
- self,
- __client_session: ClientSession,
- __trace_config_ctx: SimpleNamespace,
- __params: _ParamT_contra,
- ) -> Awaitable[None]: ...
+ _TracingSignal = Signal[ClientSession, SimpleNamespace, _ParamT_contra]
__all__ = (
@@ -49,54 +42,46 @@ class TraceConfig:
def __init__(
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
- self._on_request_start: Signal[_SignalCallback[TraceRequestStartParams]] = (
+ self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self)
+ self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = (
Signal(self)
)
- self._on_request_chunk_sent: Signal[
- _SignalCallback[TraceRequestChunkSentParams]
- ] = Signal(self)
- self._on_response_chunk_received: Signal[
- _SignalCallback[TraceResponseChunkReceivedParams]
+ self._on_response_chunk_received: _TracingSignal[
+ TraceResponseChunkReceivedParams
] = Signal(self)
- self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
+ self._on_request_end: _TracingSignal[TraceRequestEndParams] = Signal(self)
+ self._on_request_exception: _TracingSignal[TraceRequestExceptionParams] = (
+ Signal(self)
+ )
+ self._on_request_redirect: _TracingSignal[TraceRequestRedirectParams] = Signal(
self
)
- self._on_request_exception: Signal[
- _SignalCallback[TraceRequestExceptionParams]
+ self._on_connection_queued_start: _TracingSignal[
+ TraceConnectionQueuedStartParams
] = Signal(self)
- self._on_request_redirect: Signal[
- _SignalCallback[TraceRequestRedirectParams]
+ self._on_connection_queued_end: _TracingSignal[
+ TraceConnectionQueuedEndParams
] = Signal(self)
- self._on_connection_queued_start: Signal[
- _SignalCallback[TraceConnectionQueuedStartParams]
+ self._on_connection_create_start: _TracingSignal[
+ TraceConnectionCreateStartParams
] = Signal(self)
- self._on_connection_queued_end: Signal[
- _SignalCallback[TraceConnectionQueuedEndParams]
+ self._on_connection_create_end: _TracingSignal[
+ TraceConnectionCreateEndParams
] = Signal(self)
- self._on_connection_create_start: Signal[
- _SignalCallback[TraceConnectionCreateStartParams]
+ self._on_connection_reuseconn: _TracingSignal[
+ TraceConnectionReuseconnParams
] = Signal(self)
- self._on_connection_create_end: Signal[
- _SignalCallback[TraceConnectionCreateEndParams]
+ self._on_dns_resolvehost_start: _TracingSignal[
+ TraceDnsResolveHostStartParams
] = Signal(self)
- self._on_connection_reuseconn: Signal[
- _SignalCallback[TraceConnectionReuseconnParams]
- ] = Signal(self)
- self._on_dns_resolvehost_start: Signal[
- _SignalCallback[TraceDnsResolveHostStartParams]
- ] = Signal(self)
- self._on_dns_resolvehost_end: Signal[
- _SignalCallback[TraceDnsResolveHostEndParams]
- ] = Signal(self)
- self._on_dns_cache_hit: Signal[_SignalCallback[TraceDnsCacheHitParams]] = (
+ self._on_dns_resolvehost_end: _TracingSignal[TraceDnsResolveHostEndParams] = (
Signal(self)
)
- self._on_dns_cache_miss: Signal[_SignalCallback[TraceDnsCacheMissParams]] = (
+ self._on_dns_cache_hit: _TracingSignal[TraceDnsCacheHitParams] = Signal(self)
+ self._on_dns_cache_miss: _TracingSignal[TraceDnsCacheMissParams] = Signal(self)
+ self._on_request_headers_sent: _TracingSignal[TraceRequestHeadersSentParams] = (
Signal(self)
)
- self._on_request_headers_sent: Signal[
- _SignalCallback[TraceRequestHeadersSentParams]
- ] = Signal(self)
self._trace_config_ctx_factory = trace_config_ctx_factory
@@ -125,91 +110,91 @@ def freeze(self) -> None:
self._on_request_headers_sent.freeze()
@property
- def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
+ def on_request_start(self) -> "_TracingSignal[TraceRequestStartParams]":
return self._on_request_start
@property
def on_request_chunk_sent(
self,
- ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
+ ) -> "_TracingSignal[TraceRequestChunkSentParams]":
return self._on_request_chunk_sent
@property
def on_response_chunk_received(
self,
- ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
+ ) -> "_TracingSignal[TraceResponseChunkReceivedParams]":
return self._on_response_chunk_received
@property
- def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
+ def on_request_end(self) -> "_TracingSignal[TraceRequestEndParams]":
return self._on_request_end
@property
def on_request_exception(
self,
- ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
+ ) -> "_TracingSignal[TraceRequestExceptionParams]":
return self._on_request_exception
@property
def on_request_redirect(
self,
- ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
+ ) -> "_TracingSignal[TraceRequestRedirectParams]":
return self._on_request_redirect
@property
def on_connection_queued_start(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
+ ) -> "_TracingSignal[TraceConnectionQueuedStartParams]":
return self._on_connection_queued_start
@property
def on_connection_queued_end(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
+ ) -> "_TracingSignal[TraceConnectionQueuedEndParams]":
return self._on_connection_queued_end
@property
def on_connection_create_start(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
+ ) -> "_TracingSignal[TraceConnectionCreateStartParams]":
return self._on_connection_create_start
@property
def on_connection_create_end(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
+ ) -> "_TracingSignal[TraceConnectionCreateEndParams]":
return self._on_connection_create_end
@property
def on_connection_reuseconn(
self,
- ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
+ ) -> "_TracingSignal[TraceConnectionReuseconnParams]":
return self._on_connection_reuseconn
@property
def on_dns_resolvehost_start(
self,
- ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
+ ) -> "_TracingSignal[TraceDnsResolveHostStartParams]":
return self._on_dns_resolvehost_start
@property
def on_dns_resolvehost_end(
self,
- ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
+ ) -> "_TracingSignal[TraceDnsResolveHostEndParams]":
return self._on_dns_resolvehost_end
@property
- def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
+ def on_dns_cache_hit(self) -> "_TracingSignal[TraceDnsCacheHitParams]":
return self._on_dns_cache_hit
@property
- def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
+ def on_dns_cache_miss(self) -> "_TracingSignal[TraceDnsCacheMissParams]":
return self._on_dns_cache_miss
@property
def on_request_headers_sent(
self,
- ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
+ ) -> "_TracingSignal[TraceRequestHeadersSentParams]":
return self._on_request_headers_sent
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py
index 854f9bce88d..619c0085da1 100644
--- a/aiohttp/web_app.py
+++ b/aiohttp/web_app.py
@@ -62,8 +62,8 @@
if TYPE_CHECKING:
- _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
- _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
+ _AppSignal = Signal["Application"]
+ _RespPrepareSignal = Signal[Request, StreamResponse]
_Middlewares = FrozenList[Middleware]
_MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
_Subapps = List["Application"]
diff --git a/requirements/base.txt b/requirements/base.txt
index 2cd73f52418..74f528d67bc 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -8,7 +8,7 @@ aiodns==3.4.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
async-timeout==5.0.1 ; python_version < "3.11"
# via -r requirements/runtime-deps.in
@@ -40,8 +40,10 @@ pycares==4.8.0
# via aiodns
pycparser==2.22
# via cffi
-typing-extensions==4.13.2
- # via multidict
+typing-extensions==4.14.0
+ # via
+ # aiosignal
+ # multidict
uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython"
winloop==0.1.8; platform_system == "Windows" and implementation_name == "cpython"
# via -r requirements/base.in
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 9bcdeb5ff8b..4457788efc0 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiohttp-theme==0.1.7
# via -r requirements/doc.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
alabaster==1.0.0
# via sphinx
@@ -266,6 +266,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# -r requirements/test.in
typing-extensions==4.13.2
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 26728928cee..c9ab0cb822b 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
aiohttp-theme==0.1.7
# via -r requirements/doc.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
alabaster==1.0.0
# via sphinx
@@ -257,6 +257,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# -r requirements/test.in
typing-extensions==4.13.2
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 58263ab61ed..4dca87c1362 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -8,7 +8,7 @@ aiodns==3.4.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
async-timeout==5.0.1 ; python_version < "3.11"
# via -r requirements/runtime-deps.in
@@ -36,7 +36,8 @@ pycares==4.8.0
# via aiodns
pycparser==2.22
# via cffi
-typing-extensions==4.13.2
- # via multidict
-yarl==1.20.0
+typing-extensions==4.14.0
+ # via
+ # aiosignal
+ # multidict
# via -r requirements/runtime-deps.in
diff --git a/requirements/test.txt b/requirements/test.txt
index 007852dbcaa..b1ff140b7cc 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -8,7 +8,7 @@ aiodns==3.4.0
# via -r requirements/runtime-deps.in
aiohappyeyeballs==2.6.1
# via -r requirements/runtime-deps.in
-aiosignal==1.3.2
+aiosignal==1.4.0
# via -r requirements/runtime-deps.in
annotated-types==0.7.0
# via pydantic
@@ -129,6 +129,7 @@ trustme==1.2.1 ; platform_machine != "i686"
# via -r requirements/test.in
typing-extensions==4.13.2
# via
+ # aiosignal
# exceptiongroup
# multidict
# mypy
From 5f4b36c4f7aa3b6bd8b64ac29145018a4c43543d Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Mon, 7 Jul 2025 21:31:33 +0100
Subject: [PATCH 61/70] [PR #11280/91108c90 backport][3.13] Bump the minimum
supported version of aiosignal to 1.4 (#11282)
**This is a backport of PR #11280 as merged into master
(91108c905f6265bd19e8d1aafbaf2826a33180d2).**
Co-authored-by: Sam Bull
---
CHANGES/11280.misc.rst | 1 +
requirements/runtime-deps.in | 2 +-
setup.cfg | 2 +-
3 files changed, 3 insertions(+), 2 deletions(-)
create mode 100644 CHANGES/11280.misc.rst
diff --git a/CHANGES/11280.misc.rst b/CHANGES/11280.misc.rst
new file mode 100644
index 00000000000..6750918bda7
--- /dev/null
+++ b/CHANGES/11280.misc.rst
@@ -0,0 +1 @@
+Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`.
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in
index 9f254fc7e02..7400baa8370 100644
--- a/requirements/runtime-deps.in
+++ b/requirements/runtime-deps.in
@@ -2,7 +2,7 @@
aiodns >= 3.3.0
aiohappyeyeballs >= 2.5.0
-aiosignal >= 1.1.2
+aiosignal >= 1.4.0
async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
attrs >= 17.3.0
Brotli; platform_python_implementation == 'CPython'
diff --git a/setup.cfg b/setup.cfg
index c8d17cdc162..426dd1a3fcf 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -50,7 +50,7 @@ include_package_data = True
install_requires =
aiohappyeyeballs >= 2.5.0
- aiosignal >= 1.1.2
+ aiosignal >= 1.4.0
async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
attrs >= 17.3.0
frozenlist >= 1.1.1
From 03893711d35f3588a7e8891ffbf2b5a6d3319fae Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Mon, 7 Jul 2025 21:31:55 +0100
Subject: [PATCH 62/70] [PR #11280/91108c90 backport][3.12] Bump the minimum
supported version of aiosignal to 1.4 (#11281)
**This is a backport of PR #11280 as merged into master
(91108c905f6265bd19e8d1aafbaf2826a33180d2).**
Co-authored-by: Sam Bull
---
CHANGES/11280.misc.rst | 1 +
requirements/runtime-deps.in | 2 +-
setup.cfg | 2 +-
3 files changed, 3 insertions(+), 2 deletions(-)
create mode 100644 CHANGES/11280.misc.rst
diff --git a/CHANGES/11280.misc.rst b/CHANGES/11280.misc.rst
new file mode 100644
index 00000000000..6750918bda7
--- /dev/null
+++ b/CHANGES/11280.misc.rst
@@ -0,0 +1 @@
+Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`.
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in
index 7b0382a7a2b..d748eab9fac 100644
--- a/requirements/runtime-deps.in
+++ b/requirements/runtime-deps.in
@@ -2,7 +2,7 @@
aiodns >= 3.3.0
aiohappyeyeballs >= 2.5.0
-aiosignal >= 1.1.2
+aiosignal >= 1.4.0
async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
attrs >= 17.3.0
Brotli; platform_python_implementation == 'CPython'
diff --git a/setup.cfg b/setup.cfg
index 4adfde579a0..1f70301856b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -50,7 +50,7 @@ include_package_data = True
install_requires =
aiohappyeyeballs >= 2.5.0
- aiosignal >= 1.1.2
+ aiosignal >= 1.4.0
async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
attrs >= 17.3.0
frozenlist >= 1.1.1
From 41115b7498451a3f3c794207b0767216646abc52 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Wed, 9 Jul 2025 19:54:43 +0100
Subject: [PATCH 63/70] Add trailer parsing logic (#11269) (#11286)
(cherry picked from commit 7dd4b5535e6bf9c2d2f05fde638517bff065ba74)
---
CHANGES/11269.feature.rst | 1 +
aiohttp/http_parser.py | 70 +++++++++--------
aiohttp/multipart.py | 2 +-
tests/test_http_parser.py | 155 ++++++++++++++++----------------------
4 files changed, 106 insertions(+), 122 deletions(-)
create mode 100644 CHANGES/11269.feature.rst
diff --git a/CHANGES/11269.feature.rst b/CHANGES/11269.feature.rst
new file mode 100644
index 00000000000..92cf173be14
--- /dev/null
+++ b/CHANGES/11269.feature.rst
@@ -0,0 +1 @@
+Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`.
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index 93b1f376437..0ec5cfb920f 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -148,8 +148,8 @@ def parse_headers(
# note: "raw" does not mean inclusion of OWS before/after the field value
raw_headers = []
- lines_idx = 1
- line = lines[1]
+ lines_idx = 0
+ line = lines[lines_idx]
line_count = len(lines)
while line:
@@ -406,6 +406,7 @@ def get_content_length() -> Optional[int]:
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -424,6 +425,7 @@ def get_content_length() -> Optional[int]:
compression=msg.compression,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
elif not empty_body and length is None and self.read_until_eof:
payload = StreamReader(
@@ -442,6 +444,7 @@ def get_content_length() -> Optional[int]:
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -479,6 +482,10 @@ def get_content_length() -> Optional[int]:
eof = True
data = b""
+ if isinstance(
+ underlying_exc, (InvalidHeader, TransferEncodingError)
+ ):
+ raise
if eof:
start_pos = 0
@@ -641,7 +648,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
compression,
upgrade,
chunked,
- ) = self.parse_headers(lines)
+ ) = self.parse_headers(lines[1:])
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
@@ -727,7 +734,7 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
compression,
upgrade,
chunked,
- ) = self.parse_headers(lines)
+ ) = self.parse_headers(lines[1:])
if close is None:
if version_o <= HttpVersion10:
@@ -770,6 +777,8 @@ def __init__(
response_with_body: bool = True,
auto_decompress: bool = True,
lax: bool = False,
+ *,
+ headers_parser: HeadersParser,
) -> None:
self._length = 0
self._type = ParseState.PARSE_UNTIL_EOF
@@ -778,6 +787,8 @@ def __init__(
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self._lax = lax
+ self._headers_parser = headers_parser
+ self._trailer_lines: list[bytes] = []
self.done = False
# payload decompression wrapper
@@ -854,7 +865,7 @@ def feed_data(
size_b = chunk[:i] # strip chunk-extensions
# Verify no LF in the chunk-extension
if b"\n" in (ext := chunk[i:pos]):
- exc = BadHttpMessage(
+ exc = TransferEncodingError(
f"Unexpected LF in chunk-extension: {ext!r}"
)
set_exception(self.payload, exc)
@@ -875,7 +886,7 @@ def feed_data(
chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ self._chunk = ChunkState.PARSE_TRAILERS
if self._lax and chunk.startswith(b"\r"):
chunk = chunk[1:]
else:
@@ -913,38 +924,31 @@ def feed_data(
self._chunk_tail = chunk
return False, b""
- # if stream does not contain trailer, after 0\r\n
- # we should get another \r\n otherwise
- # trailers needs to be skipped until \r\n\r\n
- if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
- head = chunk[: len(SEP)]
- if head == SEP:
- # end of stream
- self.payload.feed_eof()
- return True, chunk[len(SEP) :]
- # Both CR and LF, or only LF may not be received yet. It is
- # expected that CRLF or LF will be shown at the very first
- # byte next time, otherwise trailers should come. The last
- # CRLF which marks the end of response might not be
- # contained in the same TCP segment which delivered the
- # size indicator.
- if not head:
- return False, b""
- if head == SEP[:1]:
- self._chunk_tail = head
- return False, b""
- self._chunk = ChunkState.PARSE_TRAILERS
-
- # read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
- if pos >= 0:
- chunk = chunk[pos + len(SEP) :]
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
- else:
+ if pos < 0: # No line found
self._chunk_tail = chunk
return False, b""
+ line = chunk[:pos]
+ chunk = chunk[pos + len(SEP) :]
+ if SEP == b"\n": # For lax response parsing
+ line = line.rstrip(b"\r")
+ self._trailer_lines.append(line)
+
+ # \r\n\r\n found, end of stream
+ if self._trailer_lines[-1] == b"":
+ # Headers and trailers are defined the same way,
+ # so we reuse the HeadersParser here.
+ try:
+ trailers, raw_trailers = self._headers_parser.parse_headers(
+ self._trailer_lines
+ )
+ finally:
+ self._trailer_lines.clear()
+ self.payload.feed_eof()
+ return True, chunk
+
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
index 2eb22a595b3..dae65d8385c 100644
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -781,7 +781,7 @@ async def _read_boundary(self) -> None:
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
async def _read_headers(self) -> "CIMultiDictProxy[str]":
- lines = [b""]
+ lines = []
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index a7cdbc311c5..ec51a46b89c 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -18,6 +18,7 @@
from aiohttp.http_parser import (
NO_EXTENSIONS,
DeflateBuffer,
+ HeadersParser,
HttpPayloadParser,
HttpRequestParser,
HttpRequestParserPy,
@@ -252,41 +253,13 @@ def test_content_length_transfer_encoding(parser: Any) -> None:
parser.feed_data(text)
-def test_bad_chunked_py(loop: Any, protocol: Any) -> None:
+def test_bad_chunked(parser: HttpRequestParser) -> None:
"""Test that invalid chunked encoding doesn't allow content-length to be used."""
- parser = HttpRequestParserPy(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- text = (
- b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
- + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
- )
- messages, upgrade, tail = parser.feed_data(text)
- assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
-
-
-@pytest.mark.skipif(
- "HttpRequestParserC" not in dir(aiohttp.http_parser),
- reason="C based HTTP parser not available",
-)
-def test_bad_chunked_c(loop: Any, protocol: Any) -> None:
- """C parser behaves differently. Maybe we should align them later."""
- parser = HttpRequestParserC(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
)
- with pytest.raises(http_exceptions.BadHttpMessage):
+ with pytest.raises(http_exceptions.BadHttpMessage, match="0_2e"):
parser.feed_data(text)
@@ -1174,8 +1147,8 @@ async def test_http_response_parser_bad_chunked_strict_py(loop, protocol) -> Non
text = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
)
- messages, upgrade, tail = response.feed_data(text)
- assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
+ with pytest.raises(http_exceptions.TransferEncodingError, match="5"):
+ response.feed_data(text)
@pytest.mark.dev_mode
@@ -1311,7 +1284,27 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None:
assert payload.is_eof()
-def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: Any):
+async def test_request_chunked_with_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ntest\r\n0\r\ntest: trailer\r\nsecond: test trailer\r\n\r\n"
+ messages, upgraded, tail = parser.feed_data(text)
+ assert not tail
+ msg, payload = messages[0]
+ assert await payload.read() == b"test"
+
+ # TODO: Add assertion of trailers when API added.
+
+
+async def test_request_chunked_reject_bad_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n0\r\nbad\ntrailer\r\n\r\n"
+ with pytest.raises(http_exceptions.BadHttpMessage, match=r"b'bad\\ntrailer'"):
+ parser.feed_data(text)
+
+
+def test_parse_no_length_or_te_on_post(
+ loop: asyncio.AbstractEventLoop,
+ protocol: BaseProtocol,
+ request_cls: type[HttpRequestParser],
+) -> None:
parser = request_cls(protocol, loop, limit=2**16)
text = b"POST /test HTTP/1.1\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
@@ -1494,19 +1487,10 @@ async def test_parse_chunked_payload_split_chunks(response: Any) -> None:
assert await reader.read() == b"firstsecond"
-@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
-async def test_parse_chunked_payload_with_lf_in_extensions_c_parser(
- loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
+async def test_parse_chunked_payload_with_lf_in_extensions(
+ parser: HttpRequestParser,
) -> None:
- """Test the C-parser with a chunked payload that has a LF in the chunk extensions."""
- # The C parser will raise a BadHttpMessage from feed_data
- parser = HttpRequestParserC(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
+ """Test chunked payload that has a LF in the chunk extensions."""
payload = (
b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
@@ -1517,31 +1501,6 @@ async def test_parse_chunked_payload_with_lf_in_extensions_c_parser(
parser.feed_data(payload)
-async def test_parse_chunked_payload_with_lf_in_extensions_py_parser(
- loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
-) -> None:
- """Test the py-parser with a chunked payload that has a LF in the chunk extensions."""
- # The py parser will not raise the BadHttpMessage directly, but instead
- # it will set the exception on the StreamReader.
- parser = HttpRequestParserPy(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- payload = (
- b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
- b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n"
- )
- messages, _, _ = parser.feed_data(payload)
- reader = messages[0][1]
- assert isinstance(reader.exception(), http_exceptions.BadHttpMessage)
- assert "\\nxx" in str(reader.exception())
-
-
def test_partial_url(parser: HttpRequestParser) -> None:
messages, upgrade, tail = parser.feed_data(b"GET /te")
assert len(messages) == 0
@@ -1628,7 +1587,7 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol):
class TestParsePayload:
async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out)
+ p = HttpPayloadParser(out, headers_parser=HeadersParser())
p.feed_data(b"data")
p.feed_eof()
@@ -1638,7 +1597,7 @@ async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None:
async def test_parse_length_payload_eof(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=4)
+ p = HttpPayloadParser(out, length=4, headers_parser=HeadersParser())
p.feed_data(b"da")
with pytest.raises(http_exceptions.ContentLengthError):
@@ -1648,7 +1607,7 @@ async def test_parse_chunked_payload_size_error(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
with pytest.raises(http_exceptions.TransferEncodingError):
p.feed_data(b"blah\r\n")
assert isinstance(out.exception(), http_exceptions.TransferEncodingError)
@@ -1657,7 +1616,7 @@ async def test_parse_chunked_payload_split_end(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"\r\n")
@@ -1668,7 +1627,7 @@ async def test_parse_chunked_payload_split_end2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n\r")
p.feed_data(b"\n")
@@ -1679,7 +1638,7 @@ async def test_parse_chunked_payload_split_end_trailers(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n")
p.feed_data(b"\r\n")
@@ -1691,7 +1650,7 @@ async def test_parse_chunked_payload_split_end_trailers2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r")
p.feed_data(b"\n")
@@ -1703,7 +1662,7 @@ async def test_parse_chunked_payload_split_end_trailers3(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ")
p.feed_data(b"912ec803b2ce49e4a541068d495ab570\r\n\r\n")
@@ -1714,7 +1673,7 @@ async def test_parse_chunked_payload_split_end_trailers4(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nC")
p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n")
@@ -1723,7 +1682,7 @@ async def test_parse_chunked_payload_split_end_trailers4(
async def test_http_payload_parser_length(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=2)
+ p = HttpPayloadParser(out, length=2, headers_parser=HeadersParser())
eof, tail = p.feed_data(b"1245")
assert eof
@@ -1736,7 +1695,9 @@ async def test_http_payload_parser_deflate(self, protocol: BaseProtocol) -> None
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
@@ -1750,7 +1711,9 @@ async def test_http_payload_parser_deflate_no_hdrs(
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
@@ -1763,7 +1726,9 @@ async def test_http_payload_parser_deflate_light(
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
@@ -1773,7 +1738,9 @@ async def test_http_payload_parser_deflate_split(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, compression="deflate")
+ p = HttpPayloadParser(
+ out, compression="deflate", headers_parser=HeadersParser()
+ )
# Feeding one correct byte should be enough to choose exact
# deflate decompressor
p.feed_data(b"x")
@@ -1785,7 +1752,9 @@ async def test_http_payload_parser_deflate_split_err(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, compression="deflate")
+ p = HttpPayloadParser(
+ out, compression="deflate", headers_parser=HeadersParser()
+ )
# Feeding one wrong byte should be enough to choose exact
# deflate decompressor
p.feed_data(b"K")
@@ -1797,7 +1766,7 @@ async def test_http_payload_parser_length_zero(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=0)
+ p = HttpPayloadParser(out, length=0, headers_parser=HeadersParser())
assert p.done
assert out.is_eof()
@@ -1805,7 +1774,12 @@ async def test_http_payload_parser_length_zero(
async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None:
compressed = brotli.compress(b"brotli data")
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=len(compressed), compression="br")
+ p = HttpPayloadParser(
+ out,
+ length=len(compressed),
+ compression="br",
+ headers_parser=HeadersParser(),
+ )
p.feed_data(compressed)
assert b"brotli data" == out._buffer[0]
assert out.is_eof()
@@ -1814,7 +1788,12 @@ async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None:
async def test_http_payload_zstandard(self, protocol: BaseProtocol) -> None:
compressed = zstandard.compress(b"zstd data")
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=len(compressed), compression="zstd")
+ p = HttpPayloadParser(
+ out,
+ length=len(compressed),
+ compression="zstd",
+ headers_parser=HeadersParser(),
+ )
p.feed_data(compressed)
assert b"zstd data" == out._buffer[0]
assert out.is_eof()
From e8d774f635dc6d1cd3174d0e38891da5de0e2b6a Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Wed, 9 Jul 2025 19:55:22 +0100
Subject: [PATCH 64/70] Add trailer parsing logic (#11269) (#11287)
(cherry picked from commit 7dd4b5535e6bf9c2d2f05fde638517bff065ba74)
---
CHANGES/11269.feature.rst | 1 +
aiohttp/http_parser.py | 70 +++++++++---------
aiohttp/multipart.py | 2 +-
tests/test_http_parser.py | 148 ++++++++++++++++----------------------
4 files changed, 100 insertions(+), 121 deletions(-)
create mode 100644 CHANGES/11269.feature.rst
diff --git a/CHANGES/11269.feature.rst b/CHANGES/11269.feature.rst
new file mode 100644
index 00000000000..92cf173be14
--- /dev/null
+++ b/CHANGES/11269.feature.rst
@@ -0,0 +1 @@
+Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`.
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index db61ab5264c..9f864b27876 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -142,8 +142,8 @@ def parse_headers(
# note: "raw" does not mean inclusion of OWS before/after the field value
raw_headers = []
- lines_idx = 1
- line = lines[1]
+ lines_idx = 0
+ line = lines[lines_idx]
line_count = len(lines)
while line:
@@ -400,6 +400,7 @@ def get_content_length() -> Optional[int]:
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -418,6 +419,7 @@ def get_content_length() -> Optional[int]:
compression=msg.compression,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
elif not empty_body and length is None and self.read_until_eof:
payload = StreamReader(
@@ -436,6 +438,7 @@ def get_content_length() -> Optional[int]:
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
lax=self.lax,
+ headers_parser=self._headers_parser,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -473,6 +476,10 @@ def get_content_length() -> Optional[int]:
eof = True
data = b""
+ if isinstance(
+ underlying_exc, (InvalidHeader, TransferEncodingError)
+ ):
+ raise
if eof:
start_pos = 0
@@ -635,7 +642,7 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
compression,
upgrade,
chunked,
- ) = self.parse_headers(lines)
+ ) = self.parse_headers(lines[1:])
if close is None: # then the headers weren't set in the request
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
@@ -721,7 +728,7 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
compression,
upgrade,
chunked,
- ) = self.parse_headers(lines)
+ ) = self.parse_headers(lines[1:])
if close is None:
if version_o <= HttpVersion10:
@@ -764,6 +771,8 @@ def __init__(
response_with_body: bool = True,
auto_decompress: bool = True,
lax: bool = False,
+ *,
+ headers_parser: HeadersParser,
) -> None:
self._length = 0
self._type = ParseState.PARSE_UNTIL_EOF
@@ -772,6 +781,8 @@ def __init__(
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self._lax = lax
+ self._headers_parser = headers_parser
+ self._trailer_lines: list[bytes] = []
self.done = False
# payload decompression wrapper
@@ -848,7 +859,7 @@ def feed_data(
size_b = chunk[:i] # strip chunk-extensions
# Verify no LF in the chunk-extension
if b"\n" in (ext := chunk[i:pos]):
- exc = BadHttpMessage(
+ exc = TransferEncodingError(
f"Unexpected LF in chunk-extension: {ext!r}"
)
set_exception(self.payload, exc)
@@ -869,7 +880,7 @@ def feed_data(
chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ self._chunk = ChunkState.PARSE_TRAILERS
if self._lax and chunk.startswith(b"\r"):
chunk = chunk[1:]
else:
@@ -907,38 +918,31 @@ def feed_data(
self._chunk_tail = chunk
return False, b""
- # if stream does not contain trailer, after 0\r\n
- # we should get another \r\n otherwise
- # trailers needs to be skipped until \r\n\r\n
- if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
- head = chunk[: len(SEP)]
- if head == SEP:
- # end of stream
- self.payload.feed_eof()
- return True, chunk[len(SEP) :]
- # Both CR and LF, or only LF may not be received yet. It is
- # expected that CRLF or LF will be shown at the very first
- # byte next time, otherwise trailers should come. The last
- # CRLF which marks the end of response might not be
- # contained in the same TCP segment which delivered the
- # size indicator.
- if not head:
- return False, b""
- if head == SEP[:1]:
- self._chunk_tail = head
- return False, b""
- self._chunk = ChunkState.PARSE_TRAILERS
-
- # read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
- if pos >= 0:
- chunk = chunk[pos + len(SEP) :]
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
- else:
+ if pos < 0: # No line found
self._chunk_tail = chunk
return False, b""
+ line = chunk[:pos]
+ chunk = chunk[pos + len(SEP) :]
+ if SEP == b"\n": # For lax response parsing
+ line = line.rstrip(b"\r")
+ self._trailer_lines.append(line)
+
+ # \r\n\r\n found, end of stream
+ if self._trailer_lines[-1] == b"":
+ # Headers and trailers are defined the same way,
+ # so we reuse the HeadersParser here.
+ try:
+ trailers, raw_trailers = self._headers_parser.parse_headers(
+ self._trailer_lines
+ )
+ finally:
+ self._trailer_lines.clear()
+ self.payload.feed_eof()
+ return True, chunk
+
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
index 79f8481ee30..02605146720 100644
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -777,7 +777,7 @@ async def _read_boundary(self) -> None:
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
async def _read_headers(self) -> "CIMultiDictProxy[str]":
- lines = [b""]
+ lines = []
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 58fef625f82..385452c1cfb 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -17,6 +17,7 @@
from aiohttp.http_parser import (
NO_EXTENSIONS,
DeflateBuffer,
+ HeadersParser,
HttpPayloadParser,
HttpRequestParser,
HttpRequestParserPy,
@@ -244,41 +245,13 @@ def test_content_length_transfer_encoding(parser: Any) -> None:
parser.feed_data(text)
-def test_bad_chunked_py(loop: Any, protocol: Any) -> None:
+def test_bad_chunked(parser: HttpRequestParser) -> None:
"""Test that invalid chunked encoding doesn't allow content-length to be used."""
- parser = HttpRequestParserPy(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- text = (
- b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
- + b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
- )
- messages, upgrade, tail = parser.feed_data(text)
- assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
-
-
-@pytest.mark.skipif(
- "HttpRequestParserC" not in dir(aiohttp.http_parser),
- reason="C based HTTP parser not available",
-)
-def test_bad_chunked_c(loop: Any, protocol: Any) -> None:
- """C parser behaves differently. Maybe we should align them later."""
- parser = HttpRequestParserC(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
text = (
b"GET / HTTP/1.1\r\nHost: a\r\nTransfer-Encoding: chunked\r\n\r\n0_2e\r\n\r\n"
+ b"GET / HTTP/1.1\r\nHost: a\r\nContent-Length: 5\r\n\r\n0\r\n\r\n"
)
- with pytest.raises(http_exceptions.BadHttpMessage):
+ with pytest.raises(http_exceptions.BadHttpMessage, match="0_2e"):
parser.feed_data(text)
@@ -1158,8 +1131,8 @@ async def test_http_response_parser_bad_chunked_strict_py(loop, protocol) -> Non
text = (
b"HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n5 \r\nabcde\r\n0\r\n\r\n"
)
- messages, upgrade, tail = response.feed_data(text)
- assert isinstance(messages[0][1].exception(), http_exceptions.TransferEncodingError)
+ with pytest.raises(http_exceptions.TransferEncodingError, match="5"):
+ response.feed_data(text)
@pytest.mark.dev_mode
@@ -1295,7 +1268,27 @@ def test_parse_chunked_payload_chunk_extension(parser) -> None:
assert payload.is_eof()
-def test_parse_no_length_or_te_on_post(loop: Any, protocol: Any, request_cls: Any):
+async def test_request_chunked_with_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n4\r\ntest\r\n0\r\ntest: trailer\r\nsecond: test trailer\r\n\r\n"
+ messages, upgraded, tail = parser.feed_data(text)
+ assert not tail
+ msg, payload = messages[0]
+ assert await payload.read() == b"test"
+
+ # TODO: Add assertion of trailers when API added.
+
+
+async def test_request_chunked_reject_bad_trailer(parser: HttpRequestParser) -> None:
+ text = b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n0\r\nbad\ntrailer\r\n\r\n"
+ with pytest.raises(http_exceptions.BadHttpMessage, match=r"b'bad\\ntrailer'"):
+ parser.feed_data(text)
+
+
+def test_parse_no_length_or_te_on_post(
+ loop: asyncio.AbstractEventLoop,
+ protocol: BaseProtocol,
+ request_cls: type[HttpRequestParser],
+) -> None:
parser = request_cls(protocol, loop, limit=2**16)
text = b"POST /test HTTP/1.1\r\n\r\n"
msg, payload = parser.feed_data(text)[0][0]
@@ -1478,19 +1471,10 @@ async def test_parse_chunked_payload_split_chunks(response: Any) -> None:
assert await reader.read() == b"firstsecond"
-@pytest.mark.skipif(NO_EXTENSIONS, reason="Only tests C parser.")
-async def test_parse_chunked_payload_with_lf_in_extensions_c_parser(
- loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
+async def test_parse_chunked_payload_with_lf_in_extensions(
+ parser: HttpRequestParser,
) -> None:
- """Test the C-parser with a chunked payload that has a LF in the chunk extensions."""
- # The C parser will raise a BadHttpMessage from feed_data
- parser = HttpRequestParserC(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
+ """Test chunked payload that has a LF in the chunk extensions."""
payload = (
b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
@@ -1501,31 +1485,6 @@ async def test_parse_chunked_payload_with_lf_in_extensions_c_parser(
parser.feed_data(payload)
-async def test_parse_chunked_payload_with_lf_in_extensions_py_parser(
- loop: asyncio.AbstractEventLoop, protocol: BaseProtocol
-) -> None:
- """Test the py-parser with a chunked payload that has a LF in the chunk extensions."""
- # The py parser will not raise the BadHttpMessage directly, but instead
- # it will set the exception on the StreamReader.
- parser = HttpRequestParserPy(
- protocol,
- loop,
- 2**16,
- max_line_size=8190,
- max_field_size=8190,
- )
- payload = (
- b"GET / HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n2;\nxx\r\n4c\r\n0\r\n\r\n"
- b"GET /admin HTTP/1.1\r\nHost: localhost:5001\r\n"
- b"Transfer-Encoding: chunked\r\n\r\n0\r\n\r\n"
- )
- messages, _, _ = parser.feed_data(payload)
- reader = messages[0][1]
- assert isinstance(reader.exception(), http_exceptions.BadHttpMessage)
- assert "\\nxx" in str(reader.exception())
-
-
def test_partial_url(parser: HttpRequestParser) -> None:
messages, upgrade, tail = parser.feed_data(b"GET /te")
assert len(messages) == 0
@@ -1612,7 +1571,7 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol):
class TestParsePayload:
async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out)
+ p = HttpPayloadParser(out, headers_parser=HeadersParser())
p.feed_data(b"data")
p.feed_eof()
@@ -1622,7 +1581,7 @@ async def test_parse_eof_payload(self, protocol: BaseProtocol) -> None:
async def test_parse_length_payload_eof(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=4)
+ p = HttpPayloadParser(out, length=4, headers_parser=HeadersParser())
p.feed_data(b"da")
with pytest.raises(http_exceptions.ContentLengthError):
@@ -1632,7 +1591,7 @@ async def test_parse_chunked_payload_size_error(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
with pytest.raises(http_exceptions.TransferEncodingError):
p.feed_data(b"blah\r\n")
assert isinstance(out.exception(), http_exceptions.TransferEncodingError)
@@ -1641,7 +1600,7 @@ async def test_parse_chunked_payload_split_end(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"\r\n")
@@ -1652,7 +1611,7 @@ async def test_parse_chunked_payload_split_end2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n\r")
p.feed_data(b"\n")
@@ -1663,7 +1622,7 @@ async def test_parse_chunked_payload_split_end_trailers(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n")
p.feed_data(b"\r\n")
@@ -1675,7 +1634,7 @@ async def test_parse_chunked_payload_split_end_trailers2(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\n")
p.feed_data(b"Content-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r")
p.feed_data(b"\n")
@@ -1687,7 +1646,7 @@ async def test_parse_chunked_payload_split_end_trailers3(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nContent-MD5: ")
p.feed_data(b"912ec803b2ce49e4a541068d495ab570\r\n\r\n")
@@ -1698,7 +1657,7 @@ async def test_parse_chunked_payload_split_end_trailers4(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, chunked=True)
+ p = HttpPayloadParser(out, chunked=True, headers_parser=HeadersParser())
p.feed_data(b"4\r\nasdf\r\n0\r\nC")
p.feed_data(b"ontent-MD5: 912ec803b2ce49e4a541068d495ab570\r\n\r\n")
@@ -1707,7 +1666,7 @@ async def test_parse_chunked_payload_split_end_trailers4(
async def test_http_payload_parser_length(self, protocol: BaseProtocol) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=2)
+ p = HttpPayloadParser(out, length=2, headers_parser=HeadersParser())
eof, tail = p.feed_data(b"1245")
assert eof
@@ -1720,7 +1679,9 @@ async def test_http_payload_parser_deflate(self, protocol: BaseProtocol) -> None
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
@@ -1734,7 +1695,9 @@ async def test_http_payload_parser_deflate_no_hdrs(
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
assert out.is_eof()
@@ -1747,7 +1710,9 @@ async def test_http_payload_parser_deflate_light(
length = len(COMPRESSED)
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=length, compression="deflate")
+ p = HttpPayloadParser(
+ out, length=length, compression="deflate", headers_parser=HeadersParser()
+ )
p.feed_data(COMPRESSED)
assert b"data" == out._buffer[0]
@@ -1757,7 +1722,9 @@ async def test_http_payload_parser_deflate_split(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, compression="deflate")
+ p = HttpPayloadParser(
+ out, compression="deflate", headers_parser=HeadersParser()
+ )
# Feeding one correct byte should be enough to choose exact
# deflate decompressor
p.feed_data(b"x")
@@ -1769,7 +1736,9 @@ async def test_http_payload_parser_deflate_split_err(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, compression="deflate")
+ p = HttpPayloadParser(
+ out, compression="deflate", headers_parser=HeadersParser()
+ )
# Feeding one wrong byte should be enough to choose exact
# deflate decompressor
p.feed_data(b"K")
@@ -1781,7 +1750,7 @@ async def test_http_payload_parser_length_zero(
self, protocol: BaseProtocol
) -> None:
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=0)
+ p = HttpPayloadParser(out, length=0, headers_parser=HeadersParser())
assert p.done
assert out.is_eof()
@@ -1789,7 +1758,12 @@ async def test_http_payload_parser_length_zero(
async def test_http_payload_brotli(self, protocol: BaseProtocol) -> None:
compressed = brotli.compress(b"brotli data")
out = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop())
- p = HttpPayloadParser(out, length=len(compressed), compression="br")
+ p = HttpPayloadParser(
+ out,
+ length=len(compressed),
+ compression="br",
+ headers_parser=HeadersParser(),
+ )
p.feed_data(compressed)
assert b"brotli data" == out._buffer[0]
assert out.is_eof()
From 8790eb0308713475400cffd8af01ee96f929b0fe Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 10 Jul 2025 00:41:42 +0100
Subject: [PATCH 65/70] [PR #11289/e38220fc backport][3.13] Fix
ClientSession.close() hanging with HTTPS proxy connections (#11292)
**This is a backport of PR #11289 as merged into master
(e38220fc4ed59c9de0dbe23da48e9cfd287c2ed7).**
---------
Co-authored-by: J. Nick Koston
---
CHANGES/11273.bugfix.rst | 1 +
aiohttp/connector.py | 22 ++++++++++++++++-
tests/test_connector.py | 29 ++++++++++++++++++++++
tests/test_proxy_functional.py | 45 ++++++++++++++++++++++++++++++++++
4 files changed, 96 insertions(+), 1 deletion(-)
create mode 100644 CHANGES/11273.bugfix.rst
diff --git a/CHANGES/11273.bugfix.rst b/CHANGES/11273.bugfix.rst
new file mode 100644
index 00000000000..b4d9948fbcd
--- /dev/null
+++ b/CHANGES/11273.bugfix.rst
@@ -0,0 +1 @@
+Fixed :py:meth:`ClientSession.close() ` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`.
diff --git a/aiohttp/connector.py b/aiohttp/connector.py
index 4479ae321bc..0fbacde3b42 100644
--- a/aiohttp/connector.py
+++ b/aiohttp/connector.py
@@ -229,6 +229,26 @@ def closed(self) -> bool:
return self._protocol is None or not self._protocol.is_connected()
+class _ConnectTunnelConnection(Connection):
+ """Special connection wrapper for CONNECT tunnels that must never be pooled.
+
+ This connection wraps the proxy connection that will be upgraded with TLS.
+ It must never be released to the pool because:
+ 1. Its 'closed' future will never complete, causing session.close() to hang
+ 2. It represents an intermediate state, not a reusable connection
+ 3. The real connection (with TLS) will be created separately
+ """
+
+ def release(self) -> None:
+ """Do nothing - don't pool or close the connection.
+
+ These connections are an intermediate state during the CONNECT tunnel
+ setup and will be cleaned up naturally after the TLS upgrade. If they
+ were to be pooled, they would never be properly closed, causing
+ session.close() to wait forever for their 'closed' future.
+ """
+
+
class _TransportPlaceholder:
"""placeholder for BaseConnector.connect function"""
@@ -1612,7 +1632,7 @@ async def _create_proxy_connection(
key = req.connection_key._replace(
proxy=None, proxy_auth=None, proxy_headers_hash=None
)
- conn = Connection(self, key, proto, self._loop)
+ conn = _ConnectTunnelConnection(self, key, proto, self._loop)
proxy_resp = await proxy_req.send(conn)
try:
protocol = conn._protocol
diff --git a/tests/test_connector.py b/tests/test_connector.py
index f4f33f74cd0..90a32140191 100644
--- a/tests/test_connector.py
+++ b/tests/test_connector.py
@@ -40,6 +40,7 @@
AddrInfoType,
Connection,
TCPConnector,
+ _ConnectTunnelConnection,
_DNSCacheTable,
)
from aiohttp.resolver import ResolveResult
@@ -4308,3 +4309,31 @@ async def test_available_connections_no_limits(
connection1.close()
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
+
+
+async def test_connect_tunnel_connection_release(
+ loop: asyncio.AbstractEventLoop,
+) -> None:
+ """Test _ConnectTunnelConnection.release() does not pool the connection."""
+ connector = mock.create_autospec(
+ aiohttp.BaseConnector, spec_set=True, instance=True
+ )
+ key = mock.create_autospec(ConnectionKey, spec_set=True, instance=True)
+ protocol = mock.create_autospec(ResponseHandler, spec_set=True, instance=True)
+
+ # Create a connect tunnel connection
+ conn = _ConnectTunnelConnection(connector, key, protocol, loop)
+
+ # Verify protocol is set
+ assert conn._protocol is protocol
+
+ # Release should do nothing (not pool the connection)
+ conn.release()
+
+ # Protocol should still be there (not released to pool)
+ assert conn._protocol is protocol
+ # Connector._release should NOT have been called
+ connector._release.assert_not_called()
+
+ # Clean up to avoid resource warning
+ conn.close()
diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py
index 5b33ed6ca3b..f4bc020d1f0 100644
--- a/tests/test_proxy_functional.py
+++ b/tests/test_proxy_functional.py
@@ -4,6 +4,7 @@
import platform
import ssl
import sys
+from contextlib import suppress
from re import match as match_regex
from typing import Awaitable, Callable
from unittest import mock
@@ -17,6 +18,7 @@
from aiohttp import ClientResponse, web
from aiohttp.client_exceptions import ClientConnectionError
from aiohttp.helpers import IS_MACOS, IS_WINDOWS
+from aiohttp.pytest_plugin import AiohttpServer
ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11)
@@ -884,3 +886,46 @@ async def test_proxy_auth() -> None:
proxy_auth=("user", "pass"),
):
pass
+
+
+async def test_https_proxy_connect_tunnel_session_close_no_hang(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that CONNECT tunnel connections are not pooled."""
+ # Regression test for issue #11273.
+
+ # Create a minimal proxy server
+ # The CONNECT method is handled at the protocol level, not by the handler
+ proxy_app = web.Application()
+ proxy_server = await aiohttp_server(proxy_app)
+ proxy_url = f"http://{proxy_server.host}:{proxy_server.port}"
+
+ # Create session and make HTTPS request through proxy
+ session = aiohttp.ClientSession()
+
+ try:
+ # This will fail during TLS upgrade because proxy doesn't establish tunnel
+ with suppress(aiohttp.ClientError):
+ async with session.get("https://example.com/test", proxy=proxy_url) as resp:
+ await resp.read()
+
+ # The critical test: Check if any connections were pooled with proxy=None
+ # This is the root cause of the hang - CONNECT tunnel connections
+ # should NOT be pooled
+ connector = session.connector
+ assert connector is not None
+
+ # Count connections with proxy=None in the pool
+ proxy_none_keys = [key for key in connector._conns if key.proxy is None]
+ proxy_none_count = len(proxy_none_keys)
+
+ # Before the fix, there would be a connection with proxy=None
+ # After the fix, CONNECT tunnel connections are not pooled
+ assert proxy_none_count == 0, (
+ f"Found {proxy_none_count} connections with proxy=None in pool. "
+ f"CONNECT tunnel connections should not be pooled - this is bug #11273"
+ )
+
+ finally:
+ # Clean close
+ await session.close()
From edf2abd2609a24cf1e7ac76da986af363aebf210 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 10 Jul 2025 00:42:05 +0100
Subject: [PATCH 66/70] [PR #11289/e38220fc backport][3.12] Fix
ClientSession.close() hanging with HTTPS proxy connections (#11291)
**This is a backport of PR #11289 as merged into master
(e38220fc4ed59c9de0dbe23da48e9cfd287c2ed7).**
---------
Co-authored-by: J. Nick Koston
---
CHANGES/11273.bugfix.rst | 1 +
aiohttp/connector.py | 22 ++++++++++++++++-
tests/test_connector.py | 29 ++++++++++++++++++++++
tests/test_proxy_functional.py | 45 ++++++++++++++++++++++++++++++++++
4 files changed, 96 insertions(+), 1 deletion(-)
create mode 100644 CHANGES/11273.bugfix.rst
diff --git a/CHANGES/11273.bugfix.rst b/CHANGES/11273.bugfix.rst
new file mode 100644
index 00000000000..b4d9948fbcd
--- /dev/null
+++ b/CHANGES/11273.bugfix.rst
@@ -0,0 +1 @@
+Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`.
diff --git a/aiohttp/connector.py b/aiohttp/connector.py
index 4479ae321bc..0fbacde3b42 100644
--- a/aiohttp/connector.py
+++ b/aiohttp/connector.py
@@ -229,6 +229,26 @@ def closed(self) -> bool:
return self._protocol is None or not self._protocol.is_connected()
+class _ConnectTunnelConnection(Connection):
+ """Special connection wrapper for CONNECT tunnels that must never be pooled.
+
+ This connection wraps the proxy connection that will be upgraded with TLS.
+ It must never be released to the pool because:
+ 1. Its 'closed' future will never complete, causing session.close() to hang
+ 2. It represents an intermediate state, not a reusable connection
+ 3. The real connection (with TLS) will be created separately
+ """
+
+ def release(self) -> None:
+ """Do nothing - don't pool or close the connection.
+
+ These connections are an intermediate state during the CONNECT tunnel
+ setup and will be cleaned up naturally after the TLS upgrade. If they
+ were to be pooled, they would never be properly closed, causing
+ session.close() to wait forever for their 'closed' future.
+ """
+
+
class _TransportPlaceholder:
"""placeholder for BaseConnector.connect function"""
@@ -1612,7 +1632,7 @@ async def _create_proxy_connection(
key = req.connection_key._replace(
proxy=None, proxy_auth=None, proxy_headers_hash=None
)
- conn = Connection(self, key, proto, self._loop)
+ conn = _ConnectTunnelConnection(self, key, proto, self._loop)
proxy_resp = await proxy_req.send(conn)
try:
protocol = conn._protocol
diff --git a/tests/test_connector.py b/tests/test_connector.py
index c7938ed08e4..9932dee581b 100644
--- a/tests/test_connector.py
+++ b/tests/test_connector.py
@@ -40,6 +40,7 @@
AddrInfoType,
Connection,
TCPConnector,
+ _ConnectTunnelConnection,
_DNSCacheTable,
)
from aiohttp.resolver import ResolveResult
@@ -4311,3 +4312,31 @@ async def test_available_connections_no_limits(
connection1.close()
assert conn._available_connections(key) == 1
assert conn._available_connections(other_host_key2) == 1
+
+
+async def test_connect_tunnel_connection_release(
+ loop: asyncio.AbstractEventLoop,
+) -> None:
+ """Test _ConnectTunnelConnection.release() does not pool the connection."""
+ connector = mock.create_autospec(
+ aiohttp.BaseConnector, spec_set=True, instance=True
+ )
+ key = mock.create_autospec(ConnectionKey, spec_set=True, instance=True)
+ protocol = mock.create_autospec(ResponseHandler, spec_set=True, instance=True)
+
+ # Create a connect tunnel connection
+ conn = _ConnectTunnelConnection(connector, key, protocol, loop)
+
+ # Verify protocol is set
+ assert conn._protocol is protocol
+
+ # Release should do nothing (not pool the connection)
+ conn.release()
+
+ # Protocol should still be there (not released to pool)
+ assert conn._protocol is protocol
+ # Connector._release should NOT have been called
+ connector._release.assert_not_called()
+
+ # Clean up to avoid resource warning
+ conn.close()
diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py
index 5b33ed6ca3b..f4bc020d1f0 100644
--- a/tests/test_proxy_functional.py
+++ b/tests/test_proxy_functional.py
@@ -4,6 +4,7 @@
import platform
import ssl
import sys
+from contextlib import suppress
from re import match as match_regex
from typing import Awaitable, Callable
from unittest import mock
@@ -17,6 +18,7 @@
from aiohttp import ClientResponse, web
from aiohttp.client_exceptions import ClientConnectionError
from aiohttp.helpers import IS_MACOS, IS_WINDOWS
+from aiohttp.pytest_plugin import AiohttpServer
ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11)
@@ -884,3 +886,46 @@ async def test_proxy_auth() -> None:
proxy_auth=("user", "pass"),
):
pass
+
+
+async def test_https_proxy_connect_tunnel_session_close_no_hang(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that CONNECT tunnel connections are not pooled."""
+ # Regression test for issue #11273.
+
+ # Create a minimal proxy server
+ # The CONNECT method is handled at the protocol level, not by the handler
+ proxy_app = web.Application()
+ proxy_server = await aiohttp_server(proxy_app)
+ proxy_url = f"http://{proxy_server.host}:{proxy_server.port}"
+
+ # Create session and make HTTPS request through proxy
+ session = aiohttp.ClientSession()
+
+ try:
+ # This will fail during TLS upgrade because proxy doesn't establish tunnel
+ with suppress(aiohttp.ClientError):
+ async with session.get("https://example.com/test", proxy=proxy_url) as resp:
+ await resp.read()
+
+ # The critical test: Check if any connections were pooled with proxy=None
+ # This is the root cause of the hang - CONNECT tunnel connections
+ # should NOT be pooled
+ connector = session.connector
+ assert connector is not None
+
+ # Count connections with proxy=None in the pool
+ proxy_none_keys = [key for key in connector._conns if key.proxy is None]
+ proxy_none_count = len(proxy_none_keys)
+
+ # Before the fix, there would be a connection with proxy=None
+ # After the fix, CONNECT tunnel connections are not pooled
+ assert proxy_none_count == 0, (
+ f"Found {proxy_none_count} connections with proxy=None in pool. "
+ f"CONNECT tunnel connections should not be pooled - this is bug #11273"
+ )
+
+ finally:
+ # Clean close
+ await session.close()
From d176905a0b9b9fa1e39d15d12df5a9ee3ec21088 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 10 Jul 2025 11:00:40 +0000
Subject: [PATCH 67/70] Bump pytest-codspeed from 3.2.0 to 4.0.0 (#11295)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed)
from 3.2.0 to 4.0.0.
Release notes
Sourced from pytest-codspeed's
releases.
v4.0.0
What's Changed
This release introduces profiling to the walltime instrument and
includes several key improvements to the existing benchmark fixture API!
🎉
[!WARNING]
Since we're now using CodSpeedHQ/instrument-hooks
to control the instrument state, the performance may slightly change in
tiny microbenchmarks when upgrading.
🚀 Features
New Contributors
Full Changelog: https://github.com/CodSpeedHQ/pytest-codspeed/compare/v3.2.0...v4.0.0
Changelog
Sourced from pytest-codspeed's
changelog.
[4.0.0] - 2025-07-10
🚀 Features
⚙️ Internals
- Remove pre-releases from git-cliff changelog by
@art049
- Link to the documentation by
@art049
- Improve reliability of perf trampoline compatibility checks by
@art049
[4.0.0-beta1] - 2025-06-10
🐛 Bug Fixes
[4.0.0-beta] - 2025-06-06
🚀 Features
- Support pytest-benchmark's pedantic API by
@art049 in #81
- Make sure the benchmark fixture can only be called once per bench by
@art049
- Support marker attributes to customize the walltime execution by
@art049 in #80
- Use instrument hooks by
@not-matthias
- Add instrument-hooks native module by
@not-matthias
🐛 Bug Fixes
🧪 Testing
- Add benches from the documentation's getting started by
@art049 in #71
- Add simple python benches by
@art049
⚙️ Internals
Commits
f3b85bc
Release v4.0.0 🚀
f97b02d
chore: remove pre-releases from git-cliff changelog
a4e5901
feat: update readme
8b1fb2f
chore: link to the documentation
3181f6d
chore: improve reliability of perf trampoline compatibility checks
e49de52
Release v4.0.0-beta1 🚀
0eba0c5
fix: reenable walltime instrument hooks
7b8c2c9
Release v4.0.0-beta 🚀
96fe457
feat: support pytest-benchmark's pedantic API
c4adb9b
feat: make sure the benchmark fixture can only be called once per
bench
- Additional commits viewable in compare
view
[Dependabot compatibility score](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index caadb76ad29..421da27a467 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -183,7 +183,7 @@ pytest==8.4.1
# pytest-cov
# pytest-mock
# pytest-xdist
-pytest-codspeed==3.2.0
+pytest-codspeed==4.0.0
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 6e19bd5a880..440f0e975af 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -178,7 +178,7 @@ pytest==8.4.1
# pytest-cov
# pytest-mock
# pytest-xdist
-pytest-codspeed==3.2.0
+pytest-codspeed==4.0.0
# via
# -r requirements/lint.in
# -r requirements/test.in
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 07d2c51f020..3ec364ff4eb 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -78,7 +78,7 @@ pytest==8.4.1
# -r requirements/lint.in
# pytest-codspeed
# pytest-mock
-pytest-codspeed==3.2.0
+pytest-codspeed==4.0.0
# via -r requirements/lint.in
pytest-mock==3.14.1
# via -r requirements/lint.in
diff --git a/requirements/test.txt b/requirements/test.txt
index b18fce2b9fb..6c540134a8f 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -104,7 +104,7 @@ pytest==8.4.1
# pytest-cov
# pytest-mock
# pytest-xdist
-pytest-codspeed==3.2.0
+pytest-codspeed==4.0.0
# via -r requirements/test.in
pytest-cov==6.2.1
# via -r requirements/test.in
From ffb9a33e040d1a865f4b7f6351cbe1cd60dd978c Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 10 Jul 2025 12:32:11 +0100
Subject: [PATCH 68/70] [PR #11290/16703bb9 backport][3.13] Fix file uploads
failing with HTTP 422 on 307/308 redirects (#11297)
**This is a backport of PR #11290 as merged into master
(16703bb955ae4a11a131cedbbbf3ec7aa55f4bb4).**
---------
Co-authored-by: J. Nick Koston
---
CHANGES/11270.bugfix.rst | 1 +
aiohttp/client.py | 6 +
aiohttp/payload.py | 31 ++++-
tests/test_client_functional.py | 225 ++++++++++++++++++++++++++++++++
tests/test_payload.py | 76 +++++++++++
5 files changed, 335 insertions(+), 4 deletions(-)
create mode 100644 CHANGES/11270.bugfix.rst
diff --git a/CHANGES/11270.bugfix.rst b/CHANGES/11270.bugfix.rst
new file mode 100644
index 00000000000..d1e0992b949
--- /dev/null
+++ b/CHANGES/11270.bugfix.rst
@@ -0,0 +1 @@
+Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`.
diff --git a/aiohttp/client.py b/aiohttp/client.py
index 26492cd15fe..0c72d5948ce 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -821,6 +821,12 @@ async def _connect_and_send_request(
data = None
if headers.get(hdrs.CONTENT_LENGTH):
headers.pop(hdrs.CONTENT_LENGTH)
+ else:
+ # For 307/308, always preserve the request body
+ # For 301/302 with non-POST methods, preserve the request body
+ # https://www.rfc-editor.org/rfc/rfc9110#section-15.4.3-3.1
+ # Use the existing payload to avoid recreating it from a potentially consumed file
+ data = req._body
r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
hdrs.URI
diff --git a/aiohttp/payload.py b/aiohttp/payload.py
index d119d9beefc..3affa710b63 100644
--- a/aiohttp/payload.py
+++ b/aiohttp/payload.py
@@ -486,10 +486,14 @@ def _set_or_restore_start_position(self) -> None:
if self._start_position is None:
try:
self._start_position = self._value.tell()
- except OSError:
+ except (OSError, AttributeError):
self._consumed = True # Cannot seek, mark as consumed
return
- self._value.seek(self._start_position)
+ try:
+ self._value.seek(self._start_position)
+ except (OSError, AttributeError):
+ # Failed to seek back - mark as consumed since we've already read
+ self._consumed = True
def _read_and_available_len(
self, remaining_content_len: Optional[int]
@@ -540,11 +544,30 @@ def size(self) -> Optional[int]:
"""
Size of the payload in bytes.
- Returns the number of bytes remaining to be read from the file.
+ Returns the total size of the payload content from the initial position.
+ This ensures consistent Content-Length for requests, including 307/308 redirects
+ where the same payload instance is reused.
+
Returns None if the size cannot be determined (e.g., for unseekable streams).
"""
try:
- return os.fstat(self._value.fileno()).st_size - self._value.tell()
+ # Store the start position on first access.
+ # This is critical when the same payload instance is reused (e.g., 307/308
+ # redirects). Without storing the initial position, after the payload is
+ # read once, the file position would be at EOF, which would cause the
+ # size calculation to return 0 (file_size - EOF position).
+ # By storing the start position, we ensure the size calculation always
+ # returns the correct total size for any subsequent use.
+ if self._start_position is None:
+ try:
+ self._start_position = self._value.tell()
+ except (OSError, AttributeError):
+ # Can't get position, can't determine size
+ return None
+
+ # Return the total size from the start position
+ # This ensures Content-Length is correct even after reading
+ return os.fstat(self._value.fileno()).st_size - self._start_position
except (AttributeError, OSError):
return None
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py
index 08cc5c97538..230d47389c5 100644
--- a/tests/test_client_functional.py
+++ b/tests/test_client_functional.py
@@ -5286,3 +5286,228 @@ async def handler(request: web.Request) -> web.Response:
assert (
len(resp._raw_cookie_headers) == 12
), "All raw headers should be preserved"
+
+
+@pytest.mark.parametrize("status", (307, 308))
+async def test_file_upload_307_308_redirect(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int
+) -> None:
+ """Test that file uploads work correctly with 307/308 redirects.
+
+ This demonstrates the bug where file payloads get incorrect Content-Length
+ on redirect because the file position isn't reset.
+ """
+ received_bodies: list[bytes] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store the body content
+ body = await request.read()
+ received_bodies.append(body)
+
+ if str(request.url.path).endswith("/"):
+ # Redirect URLs ending with / to remove the trailing slash
+ return web.Response(
+ status=status,
+ headers={
+ "Location": str(request.url.with_path(request.url.path.rstrip("/")))
+ },
+ )
+
+ # Return success with the body size
+ return web.json_response(
+ {
+ "received_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_post("/upload/", handler)
+ app.router.add_post("/upload", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / f"test_upload_{status}.txt"
+ content = b"This is test file content for upload."
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL with trailing slash (will trigger redirect)
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.post("/upload/", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # The server should receive the full file content
+ assert result["received_size"] == expected_size
+ assert result["content_length"] == str(expected_size)
+
+ # Both requests should have received the same content
+ assert len(received_bodies) == 2
+ assert received_bodies[0] == content # First request
+ assert received_bodies[1] == content # After redirect
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+@pytest.mark.parametrize("status", [301, 302])
+@pytest.mark.parametrize("method", ["PUT", "PATCH", "DELETE"])
+async def test_file_upload_301_302_redirect_non_post(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int, method: str
+) -> None:
+ """Test that file uploads work correctly with 301/302 redirects for non-POST methods.
+
+ Per RFC 9110, 301/302 redirects should preserve the method and body for non-POST requests.
+ """
+ received_bodies: list[bytes] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store the body content
+ body = await request.read()
+ received_bodies.append(body)
+
+ if str(request.url.path).endswith("/"):
+ # Redirect URLs ending with / to remove the trailing slash
+ return web.Response(
+ status=status,
+ headers={
+ "Location": str(request.url.with_path(request.url.path.rstrip("/")))
+ },
+ )
+
+ # Return success with the body size
+ return web.json_response(
+ {
+ "method": request.method,
+ "received_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_route(method, "/upload/", handler)
+ app.router.add_route(method, "/upload", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / f"test_upload_{status}_{method.lower()}.txt"
+ content = f"Test {method} file content for {status} redirect.".encode()
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL with trailing slash (will trigger redirect)
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.request(method, "/upload/", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # The server should receive the full file content after redirect
+ assert result["method"] == method # Method should be preserved
+ assert result["received_size"] == expected_size
+ assert result["content_length"] == str(expected_size)
+
+ # Both requests should have received the same content
+ assert len(received_bodies) == 2
+ assert received_bodies[0] == content # First request
+ assert received_bodies[1] == content # After redirect
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+async def test_file_upload_307_302_redirect_chain(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path
+) -> None:
+ """Test that file uploads work correctly with 307->302->200 redirect chain.
+
+ This verifies that:
+ 1. 307 preserves POST method and file body
+ 2. 302 changes POST to GET and drops the body
+ 3. No body leaks to the final GET request
+ """
+ received_requests: list[dict[str, Any]] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store request details
+ body = await request.read()
+ received_requests.append(
+ {
+ "path": str(request.url.path),
+ "method": request.method,
+ "body_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ if request.url.path == "/upload307":
+ # First redirect: 307 should preserve method and body
+ return web.Response(status=307, headers={"Location": "/upload302"})
+ elif request.url.path == "/upload302":
+ # Second redirect: 302 should change POST to GET
+ return web.Response(status=302, headers={"Location": "/final"})
+ else:
+ # Final destination
+ return web.json_response(
+ {
+ "final_method": request.method,
+ "final_body_size": len(body),
+ "requests_received": len(received_requests),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_route("*", "/upload307", handler)
+ app.router.add_route("*", "/upload302", handler)
+ app.router.add_route("*", "/final", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / "test_redirect_chain.txt"
+ content = b"Test file content that should not leak to GET request"
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL that triggers 307->302->final redirect chain
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.post("/upload307", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # Verify the redirect chain
+ assert len(resp.history) == 2
+ assert resp.history[0].status == 307
+ assert resp.history[1].status == 302
+
+ # Verify final request is GET with no body
+ assert result["final_method"] == "GET"
+ assert result["final_body_size"] == 0
+ assert result["requests_received"] == 3
+
+ # Verify the request sequence
+ assert len(received_requests) == 3
+
+ # First request (307): POST with full body
+ assert received_requests[0]["path"] == "/upload307"
+ assert received_requests[0]["method"] == "POST"
+ assert received_requests[0]["body_size"] == expected_size
+ assert received_requests[0]["content_length"] == str(expected_size)
+
+ # Second request (302): POST with preserved body from 307
+ assert received_requests[1]["path"] == "/upload302"
+ assert received_requests[1]["method"] == "POST"
+ assert received_requests[1]["body_size"] == expected_size
+ assert received_requests[1]["content_length"] == str(expected_size)
+
+ # Third request (final): GET with no body (302 changed method and dropped body)
+ assert received_requests[2]["path"] == "/final"
+ assert received_requests[2]["method"] == "GET"
+ assert received_requests[2]["body_size"] == 0
+ assert received_requests[2]["content_length"] is None
+
+ finally:
+ await asyncio.to_thread(f.close)
diff --git a/tests/test_payload.py b/tests/test_payload.py
index 2fd0a0f60d9..e749881cc82 100644
--- a/tests/test_payload.py
+++ b/tests/test_payload.py
@@ -1278,3 +1278,79 @@ def open_file() -> TextIO:
assert len(writer.buffer) == utf16_file_size
finally:
await loop.run_in_executor(None, f.close)
+
+
+async def test_iobase_payload_size_after_reading(tmp_path: Path) -> None:
+ """Test that IOBasePayload.size returns correct size after file has been read.
+
+ This demonstrates the bug where size calculation doesn't account for
+ the current file position, causing issues with 307/308 redirects.
+ """
+ # Create a test file with known content
+ test_file = tmp_path / "test.txt"
+ content = b"Hello, World! This is test content."
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Open the file and create payload
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ p = payload.BufferedReaderPayload(f)
+
+ # First size check - should return full file size
+ assert p.size == expected_size
+
+ # Read the file (simulating first request)
+ writer = BufferWriter()
+ await p.write(writer)
+ assert len(writer.buffer) == expected_size
+
+ # Second size check - should still return full file size
+ # but currently returns 0 because file position is at EOF
+ assert p.size == expected_size # This assertion fails!
+
+ # Attempting to write again should write the full content
+ # but currently writes nothing because file is at EOF
+ writer2 = BufferWriter()
+ await p.write(writer2)
+ assert len(writer2.buffer) == expected_size # This also fails!
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+async def test_iobase_payload_size_unseekable() -> None:
+ """Test that IOBasePayload.size returns None for unseekable files."""
+
+ class UnseekableFile:
+ """Mock file object that doesn't support seeking."""
+
+ def __init__(self, content: bytes) -> None:
+ self.content = content
+ self.pos = 0
+
+ def read(self, size: int) -> bytes:
+ result = self.content[self.pos : self.pos + size]
+ self.pos += len(result)
+ return result
+
+ def tell(self) -> int:
+ raise OSError("Unseekable file")
+
+ content = b"Unseekable content"
+ f = UnseekableFile(content)
+ p = payload.IOBasePayload(f) # type: ignore[arg-type]
+
+ # Size should return None for unseekable files
+ assert p.size is None
+
+ # Payload should not be consumed before writing
+ assert p.consumed is False
+
+ # Writing should still work
+ writer = BufferWriter()
+ await p.write(writer)
+ assert writer.buffer == content
+
+ # For unseekable files that can't tell() or seek(),
+ # they are marked as consumed after the first write
+ assert p.consumed is True
From 13b20a1b0af87b86816355a9090de191723858fc Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 10 Jul 2025 12:41:06 +0100
Subject: [PATCH 69/70] [PR #11290/16703bb9 backport][3.12] Fix file uploads
failing with HTTP 422 on 307/308 redirects (#11296)
**This is a backport of PR #11290 as merged into master
(16703bb955ae4a11a131cedbbbf3ec7aa55f4bb4).**
---------
Co-authored-by: J. Nick Koston
---
CHANGES/11270.bugfix.rst | 1 +
aiohttp/client.py | 6 +
aiohttp/payload.py | 31 ++++-
tests/test_client_functional.py | 225 ++++++++++++++++++++++++++++++++
tests/test_payload.py | 76 +++++++++++
5 files changed, 335 insertions(+), 4 deletions(-)
create mode 100644 CHANGES/11270.bugfix.rst
diff --git a/CHANGES/11270.bugfix.rst b/CHANGES/11270.bugfix.rst
new file mode 100644
index 00000000000..d1e0992b949
--- /dev/null
+++ b/CHANGES/11270.bugfix.rst
@@ -0,0 +1 @@
+Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`.
diff --git a/aiohttp/client.py b/aiohttp/client.py
index 26492cd15fe..0c72d5948ce 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -821,6 +821,12 @@ async def _connect_and_send_request(
data = None
if headers.get(hdrs.CONTENT_LENGTH):
headers.pop(hdrs.CONTENT_LENGTH)
+ else:
+ # For 307/308, always preserve the request body
+ # For 301/302 with non-POST methods, preserve the request body
+ # https://www.rfc-editor.org/rfc/rfc9110#section-15.4.3-3.1
+ # Use the existing payload to avoid recreating it from a potentially consumed file
+ data = req._body
r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
hdrs.URI
diff --git a/aiohttp/payload.py b/aiohttp/payload.py
index d119d9beefc..3affa710b63 100644
--- a/aiohttp/payload.py
+++ b/aiohttp/payload.py
@@ -486,10 +486,14 @@ def _set_or_restore_start_position(self) -> None:
if self._start_position is None:
try:
self._start_position = self._value.tell()
- except OSError:
+ except (OSError, AttributeError):
self._consumed = True # Cannot seek, mark as consumed
return
- self._value.seek(self._start_position)
+ try:
+ self._value.seek(self._start_position)
+ except (OSError, AttributeError):
+ # Failed to seek back - mark as consumed since we've already read
+ self._consumed = True
def _read_and_available_len(
self, remaining_content_len: Optional[int]
@@ -540,11 +544,30 @@ def size(self) -> Optional[int]:
"""
Size of the payload in bytes.
- Returns the number of bytes remaining to be read from the file.
+ Returns the total size of the payload content from the initial position.
+ This ensures consistent Content-Length for requests, including 307/308 redirects
+ where the same payload instance is reused.
+
Returns None if the size cannot be determined (e.g., for unseekable streams).
"""
try:
- return os.fstat(self._value.fileno()).st_size - self._value.tell()
+ # Store the start position on first access.
+ # This is critical when the same payload instance is reused (e.g., 307/308
+ # redirects). Without storing the initial position, after the payload is
+ # read once, the file position would be at EOF, which would cause the
+ # size calculation to return 0 (file_size - EOF position).
+ # By storing the start position, we ensure the size calculation always
+ # returns the correct total size for any subsequent use.
+ if self._start_position is None:
+ try:
+ self._start_position = self._value.tell()
+ except (OSError, AttributeError):
+ # Can't get position, can't determine size
+ return None
+
+ # Return the total size from the start position
+ # This ensures Content-Length is correct even after reading
+ return os.fstat(self._value.fileno()).st_size - self._start_position
except (AttributeError, OSError):
return None
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py
index 08cc5c97538..230d47389c5 100644
--- a/tests/test_client_functional.py
+++ b/tests/test_client_functional.py
@@ -5286,3 +5286,228 @@ async def handler(request: web.Request) -> web.Response:
assert (
len(resp._raw_cookie_headers) == 12
), "All raw headers should be preserved"
+
+
+@pytest.mark.parametrize("status", (307, 308))
+async def test_file_upload_307_308_redirect(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int
+) -> None:
+ """Test that file uploads work correctly with 307/308 redirects.
+
+ This demonstrates the bug where file payloads get incorrect Content-Length
+ on redirect because the file position isn't reset.
+ """
+ received_bodies: list[bytes] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store the body content
+ body = await request.read()
+ received_bodies.append(body)
+
+ if str(request.url.path).endswith("/"):
+ # Redirect URLs ending with / to remove the trailing slash
+ return web.Response(
+ status=status,
+ headers={
+ "Location": str(request.url.with_path(request.url.path.rstrip("/")))
+ },
+ )
+
+ # Return success with the body size
+ return web.json_response(
+ {
+ "received_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_post("/upload/", handler)
+ app.router.add_post("/upload", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / f"test_upload_{status}.txt"
+ content = b"This is test file content for upload."
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL with trailing slash (will trigger redirect)
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.post("/upload/", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # The server should receive the full file content
+ assert result["received_size"] == expected_size
+ assert result["content_length"] == str(expected_size)
+
+ # Both requests should have received the same content
+ assert len(received_bodies) == 2
+ assert received_bodies[0] == content # First request
+ assert received_bodies[1] == content # After redirect
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+@pytest.mark.parametrize("status", [301, 302])
+@pytest.mark.parametrize("method", ["PUT", "PATCH", "DELETE"])
+async def test_file_upload_301_302_redirect_non_post(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path, status: int, method: str
+) -> None:
+ """Test that file uploads work correctly with 301/302 redirects for non-POST methods.
+
+ Per RFC 9110, 301/302 redirects should preserve the method and body for non-POST requests.
+ """
+ received_bodies: list[bytes] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store the body content
+ body = await request.read()
+ received_bodies.append(body)
+
+ if str(request.url.path).endswith("/"):
+ # Redirect URLs ending with / to remove the trailing slash
+ return web.Response(
+ status=status,
+ headers={
+ "Location": str(request.url.with_path(request.url.path.rstrip("/")))
+ },
+ )
+
+ # Return success with the body size
+ return web.json_response(
+ {
+ "method": request.method,
+ "received_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_route(method, "/upload/", handler)
+ app.router.add_route(method, "/upload", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / f"test_upload_{status}_{method.lower()}.txt"
+ content = f"Test {method} file content for {status} redirect.".encode()
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL with trailing slash (will trigger redirect)
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.request(method, "/upload/", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # The server should receive the full file content after redirect
+ assert result["method"] == method # Method should be preserved
+ assert result["received_size"] == expected_size
+ assert result["content_length"] == str(expected_size)
+
+ # Both requests should have received the same content
+ assert len(received_bodies) == 2
+ assert received_bodies[0] == content # First request
+ assert received_bodies[1] == content # After redirect
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+async def test_file_upload_307_302_redirect_chain(
+ aiohttp_client: AiohttpClient, tmp_path: pathlib.Path
+) -> None:
+ """Test that file uploads work correctly with 307->302->200 redirect chain.
+
+ This verifies that:
+ 1. 307 preserves POST method and file body
+ 2. 302 changes POST to GET and drops the body
+ 3. No body leaks to the final GET request
+ """
+ received_requests: list[dict[str, Any]] = []
+
+ async def handler(request: web.Request) -> web.Response:
+ # Store request details
+ body = await request.read()
+ received_requests.append(
+ {
+ "path": str(request.url.path),
+ "method": request.method,
+ "body_size": len(body),
+ "content_length": request.headers.get("Content-Length"),
+ }
+ )
+
+ if request.url.path == "/upload307":
+ # First redirect: 307 should preserve method and body
+ return web.Response(status=307, headers={"Location": "/upload302"})
+ elif request.url.path == "/upload302":
+ # Second redirect: 302 should change POST to GET
+ return web.Response(status=302, headers={"Location": "/final"})
+ else:
+ # Final destination
+ return web.json_response(
+ {
+ "final_method": request.method,
+ "final_body_size": len(body),
+ "requests_received": len(received_requests),
+ }
+ )
+
+ app = web.Application()
+ app.router.add_route("*", "/upload307", handler)
+ app.router.add_route("*", "/upload302", handler)
+ app.router.add_route("*", "/final", handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a test file
+ test_file = tmp_path / "test_redirect_chain.txt"
+ content = b"Test file content that should not leak to GET request"
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Upload file to URL that triggers 307->302->final redirect chain
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ async with client.post("/upload307", data=f) as resp:
+ assert resp.status == 200
+ result = await resp.json()
+
+ # Verify the redirect chain
+ assert len(resp.history) == 2
+ assert resp.history[0].status == 307
+ assert resp.history[1].status == 302
+
+ # Verify final request is GET with no body
+ assert result["final_method"] == "GET"
+ assert result["final_body_size"] == 0
+ assert result["requests_received"] == 3
+
+ # Verify the request sequence
+ assert len(received_requests) == 3
+
+ # First request (307): POST with full body
+ assert received_requests[0]["path"] == "/upload307"
+ assert received_requests[0]["method"] == "POST"
+ assert received_requests[0]["body_size"] == expected_size
+ assert received_requests[0]["content_length"] == str(expected_size)
+
+ # Second request (302): POST with preserved body from 307
+ assert received_requests[1]["path"] == "/upload302"
+ assert received_requests[1]["method"] == "POST"
+ assert received_requests[1]["body_size"] == expected_size
+ assert received_requests[1]["content_length"] == str(expected_size)
+
+ # Third request (final): GET with no body (302 changed method and dropped body)
+ assert received_requests[2]["path"] == "/final"
+ assert received_requests[2]["method"] == "GET"
+ assert received_requests[2]["body_size"] == 0
+ assert received_requests[2]["content_length"] is None
+
+ finally:
+ await asyncio.to_thread(f.close)
diff --git a/tests/test_payload.py b/tests/test_payload.py
index 2fd0a0f60d9..e749881cc82 100644
--- a/tests/test_payload.py
+++ b/tests/test_payload.py
@@ -1278,3 +1278,79 @@ def open_file() -> TextIO:
assert len(writer.buffer) == utf16_file_size
finally:
await loop.run_in_executor(None, f.close)
+
+
+async def test_iobase_payload_size_after_reading(tmp_path: Path) -> None:
+ """Test that IOBasePayload.size returns correct size after file has been read.
+
+ This demonstrates the bug where size calculation doesn't account for
+ the current file position, causing issues with 307/308 redirects.
+ """
+ # Create a test file with known content
+ test_file = tmp_path / "test.txt"
+ content = b"Hello, World! This is test content."
+ await asyncio.to_thread(test_file.write_bytes, content)
+ expected_size = len(content)
+
+ # Open the file and create payload
+ f = await asyncio.to_thread(open, test_file, "rb")
+ try:
+ p = payload.BufferedReaderPayload(f)
+
+ # First size check - should return full file size
+ assert p.size == expected_size
+
+ # Read the file (simulating first request)
+ writer = BufferWriter()
+ await p.write(writer)
+ assert len(writer.buffer) == expected_size
+
+ # Second size check - should still return full file size
+ # but currently returns 0 because file position is at EOF
+ assert p.size == expected_size # This assertion fails!
+
+ # Attempting to write again should write the full content
+ # but currently writes nothing because file is at EOF
+ writer2 = BufferWriter()
+ await p.write(writer2)
+ assert len(writer2.buffer) == expected_size # This also fails!
+ finally:
+ await asyncio.to_thread(f.close)
+
+
+async def test_iobase_payload_size_unseekable() -> None:
+ """Test that IOBasePayload.size returns None for unseekable files."""
+
+ class UnseekableFile:
+ """Mock file object that doesn't support seeking."""
+
+ def __init__(self, content: bytes) -> None:
+ self.content = content
+ self.pos = 0
+
+ def read(self, size: int) -> bytes:
+ result = self.content[self.pos : self.pos + size]
+ self.pos += len(result)
+ return result
+
+ def tell(self) -> int:
+ raise OSError("Unseekable file")
+
+ content = b"Unseekable content"
+ f = UnseekableFile(content)
+ p = payload.IOBasePayload(f) # type: ignore[arg-type]
+
+ # Size should return None for unseekable files
+ assert p.size is None
+
+ # Payload should not be consumed before writing
+ assert p.consumed is False
+
+ # Writing should still work
+ writer = BufferWriter()
+ await p.write(writer)
+ assert writer.buffer == content
+
+ # For unseekable files that can't tell() or seek(),
+ # they are marked as consumed after the first write
+ assert p.consumed is True
From 90b6cf6f3e303309db6d388f1e53d0f30997e1c8 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Thu, 10 Jul 2025 13:08:46 +0100
Subject: [PATCH 70/70] Release 3.12.14 (#11298)
---
CHANGES.rst | 59 +++++++++++++++++++++++++++++++++++++++
CHANGES/11234.doc.rst | 2 --
CHANGES/11269.feature.rst | 1 -
CHANGES/11270.bugfix.rst | 1 -
CHANGES/11273.bugfix.rst | 1 -
CHANGES/11280.misc.rst | 1 -
aiohttp/__init__.py | 2 +-
7 files changed, 60 insertions(+), 7 deletions(-)
delete mode 100644 CHANGES/11234.doc.rst
delete mode 100644 CHANGES/11269.feature.rst
delete mode 100644 CHANGES/11270.bugfix.rst
delete mode 100644 CHANGES/11273.bugfix.rst
delete mode 100644 CHANGES/11280.misc.rst
diff --git a/CHANGES.rst b/CHANGES.rst
index d991d99cf5d..c701167b33a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,65 @@
.. towncrier release notes start
+3.12.14 (2025-07-10)
+====================
+
+Bug fixes
+---------
+
+- Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`11270`.
+
+
+
+- Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`11273`.
+
+
+
+- Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`11280`.
+
+
+
+
+Features
+--------
+
+- Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`11269`.
+
+
+
+
+Improved documentation
+----------------------
+
+- Clarified exceptions raised by ``WebSocketResponse.send_frame`` et al.
+ -- by :user:`DoctorJohn`.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`11234`.
+
+
+
+
+----
+
+
3.12.13 (2025-06-14)
====================
diff --git a/CHANGES/11234.doc.rst b/CHANGES/11234.doc.rst
deleted file mode 100644
index 900b56a771c..00000000000
--- a/CHANGES/11234.doc.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Clarified exceptions raised by ``WebSocketResponse.send_frame`` at al.
--- by :user:`DoctorJohn`.
diff --git a/CHANGES/11269.feature.rst b/CHANGES/11269.feature.rst
deleted file mode 100644
index 92cf173be14..00000000000
--- a/CHANGES/11269.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-Added initial trailer parsing logic to Python HTTP parser -- by :user:`Dreamsorcerer`.
diff --git a/CHANGES/11270.bugfix.rst b/CHANGES/11270.bugfix.rst
deleted file mode 100644
index d1e0992b949..00000000000
--- a/CHANGES/11270.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed file uploads failing with HTTP 422 errors when encountering 307/308 redirects, and 301/302 redirects for non-POST methods, by preserving the request body when appropriate per :rfc:`9110#section-15.4.3-3.1` -- by :user:`bdraco`.
diff --git a/CHANGES/11273.bugfix.rst b/CHANGES/11273.bugfix.rst
deleted file mode 100644
index b4d9948fbcd..00000000000
--- a/CHANGES/11273.bugfix.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed :py:meth:`ClientSession.close() <aiohttp.ClientSession.close>` hanging indefinitely when using HTTPS requests through HTTP proxies -- by :user:`bdraco`.
diff --git a/CHANGES/11280.misc.rst b/CHANGES/11280.misc.rst
deleted file mode 100644
index 6750918bda7..00000000000
--- a/CHANGES/11280.misc.rst
+++ /dev/null
@@ -1 +0,0 @@
-Bumped minimum version of aiosignal to 1.4+ to resolve typing issues -- by :user:`Dreamsorcerer`.
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index cc73fcc2c8e..a3ab781e984 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.12.14.dev0"
+__version__ = "3.12.14"
from typing import TYPE_CHECKING, Tuple