Fix multiplex err leak (#166)
2.11.903 (2024-10-22)
=====================

- Fixed (low-level) exception leak when using ``get_response(...)``
after ``urlopen(..., multiplexed=True)``.
- Fixed an erroneously calculated maximal wait when starting a connection
  upgrade to a higher protocol version in rare cases (async+windows only).
Ousret authored Oct 22, 2024
2 parents 7ca2f24 + a4003be commit c9cdc22
Showing 13 changed files with 170 additions and 13 deletions.
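
For context, the exception-leak fix above concerns the multiplexed request flow, where `urlopen(..., multiplexed=True)` hands back a `ResponsePromise` that is later redeemed with `get_response(...)`. Below is a minimal sketch of that call pattern, assuming the urllib3-future pool API shown in the diffs further down; the host and paths are illustrative only:

```python
import urllib3  # with urllib3-future installed, this resolves to the fork

# Illustrative host; multiplexing requires a peer that speaks HTTP/2 or HTTP/3.
with urllib3.HTTPSConnectionPool("example.org") as pool:
    # With multiplexed=True, urlopen returns a ResponsePromise instead of a response.
    promises = [
        pool.urlopen("GET", f"/resource/{i}", multiplexed=True) for i in range(3)
    ]

    # get_response(...) redeems each promise; this is the code path whose
    # low-level exception handling is tightened by this commit.
    for promise in promises:
        response = pool.get_response(promise=promise)
        if response is not None:
            print(response.status)
```

If the remote peer turns out not to support multiplexing, `urlopen` may hand back a regular response rather than a promise, which is what the `TypeError` guard visible in the diffs reports.
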
6 changes: 6 additions & 0 deletions CHANGES.rst
@@ -1,3 +1,9 @@
2.11.903 (2024-10-22)
=====================

- Fixed (low-level) exception leak when using ``get_response(...)`` after ``urlopen(..., multiplexed=True)``.
- Fixed an erroneously calculated maximal wait when starting a connection upgrade to a higher protocol version in rare cases (async+windows only).

2.11.902 (2024-10-22)
=====================

10 changes: 10 additions & 0 deletions README.md
@@ -202,6 +202,16 @@ python -m pip install urllib3

The order is (actually) important.

Super! But how can I do that when installing something that requires urllib3-future somewhere in its dependency tree?

Let's say you want to install Niquests and keep BOTH urllib3 and urllib3-future; do:

```
URLLIB3_NO_OVERRIDE=true pip install niquests --no-binary urllib3-future
```

This applies to every package you wish to install that indirectly brings in urllib3-future.
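
As a quick sanity check (not part of the original README text), you can confirm which implementation the `urllib3` import resolves to; urllib3-future uses a `2.x.9xx` patch scheme, as in this very release (2.11.903):

```python
import urllib3

# urllib3-future versions end in .9xx (e.g. 2.11.903) while upstream urllib3's do not,
# so the version string reveals which implementation is actually installed.
print(urllib3.__version__)
```
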

- **Can you guarantee us that everything will go smoothly?**

Guarantee is a strong word with a lot of (legal) implications. We cannot offer a "guarantee".
2 changes: 2 additions & 0 deletions docs/advanced-usage.rst
@@ -1032,6 +1032,8 @@ You can pass the following options to the DNS url:
- ``doh://dns.google/?disabled_svn=h3`` -> Disable HTTP/3
- proxy _(url)_
- proxy_headers
- keepalive_delay
- keepalive_idle_window

.. warning:: DNS over HTTPS supports HTTP/1.1, HTTP/2 and HTTP/3. By default it tries to negotiate HTTP/2, then, if available, HTTP/3. The server must provide a valid ``Alt-Svc`` in responses.
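
To illustrate the two newly documented options, here is a minimal sketch, assuming a ``PoolManager`` that accepts a resolver DNS url as described in this guide; the parameter values are illustrative, not recommended defaults:

```python
from urllib3 import PoolManager

# keepalive_delay and keepalive_idle_window are the two options added here;
# the values below are illustrative only.
resolver = "doh://dns.google/?keepalive_delay=300&keepalive_idle_window=60"

with PoolManager(resolver=resolver) as pm:
    response = pm.request("GET", "https://example.org")
    print(response.status)
```
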

2 changes: 1 addition & 1 deletion mypy-requirements.txt
@@ -1,4 +1,4 @@
mypy==1.11.1; python_version >= '3.8'
mypy==1.12.1; python_version >= '3.8'
mypy==1.4.1; python_version < '3.8'
idna>=2.0.0
cryptography==42.0.5
2 changes: 1 addition & 1 deletion noxfile.py
@@ -347,7 +347,7 @@ def downstream_requests(session: nox.Session) -> None:
session.install("-r", "requirements-dev.txt", silent=False)

session.cd(root)
session.install(".[socks]", silent=False)
session.install(".", silent=False)
session.cd(f"{tmp_dir}/requests")

session.run("python", "-c", "import urllib3; print(urllib3.__version__)")
73 changes: 70 additions & 3 deletions src/urllib3/_async/connectionpool.py
@@ -792,17 +792,84 @@ async def get_response(
if self.pool is None:
raise ClosedPoolError(self, "Pool is closed")

if promise is not None and not isinstance(promise, ResponsePromise):
raise TypeError(
f"get_response only support ResponsePromise but received {type(promise)} instead. "
f"This may occur if you expected the remote peer to support multiplexing but did not."
)

clean_exit = True

try:
async with self.pool.borrow(
promise or ResponsePromise,
block=promise is not None,
not_idle_only=promise is None,
) as conn:
response = await conn.getresponse(
promise=promise, police_officer=self.pool
)
try:
response = await conn.getresponse(
promise=promise, police_officer=self.pool
)
except (BaseSSLError, OSError) as e:
if promise is not None:
url = typing.cast(str, promise.get_parameter("url"))
else:
url = ""
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
except UnavailableTraffic:
return None
except (
TimeoutError,
OSError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
ProxyError,
RecoverableError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
new_e: Exception = e
if isinstance(e, (BaseSSLError, CertificateError)):
new_e = SSLError(e)
if isinstance(
new_e,
(
OSError,
NewConnectionError,
TimeoutError,
SSLError,
),
) and (conn and conn.proxy and not conn.has_connected_to_proxy):
new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
elif isinstance(new_e, OSError):
new_e = ProtocolError("Connection aborted.", new_e)

if promise is not None:
retries = typing.cast(Retry, promise.get_parameter("retries"))

method = typing.cast(str, promise.get_parameter("method"))
url = typing.cast(str, promise.get_parameter("url"))

retries = retries.increment(
method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
)
await retries.async_sleep()
else:
raise new_e # we only retry if we were given a specific promise; we can't blindly assume it is safe to retry.

# Keep track of the error for the retry warning.
err = e

if not clean_exit:
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)

return await self.get_response(promise=promise)

if promise is not None and response is None:
raise ValueError(
2 changes: 1 addition & 1 deletion src/urllib3/_version.py
@@ -1,4 +1,4 @@
# This file is protected via CODEOWNERS
from __future__ import annotations

__version__ = "2.11.902"
__version__ = "2.11.903"
4 changes: 4 additions & 0 deletions src/urllib3/backend/_async/hface.py
@@ -261,6 +261,10 @@ async def _upgrade(self) -> None: # type: ignore[override]
self.conn_info.tls_handshake_latency.total_seconds()
)
self._max_tolerable_delay_for_upgrade *= 10.0
# we can, in rare cases, get self._max_tolerable_delay_for_upgrade <= 0.001
# we want to avoid this at all costs.
if self._max_tolerable_delay_for_upgrade <= 0.01:
self._max_tolerable_delay_for_upgrade = 3.0
else: # by default (safe/conservative fallback) to 3000ms
self._max_tolerable_delay_for_upgrade = 3.0

4 changes: 4 additions & 0 deletions src/urllib3/backend/hface.py
@@ -271,6 +271,10 @@ def _upgrade(self) -> None:
self.conn_info.tls_handshake_latency.total_seconds()
)
self._max_tolerable_delay_for_upgrade *= 10.0
# we can, in rare cases, get self._max_tolerable_delay_for_upgrade == 0.0
# we want to avoid this at all costs.
if self._max_tolerable_delay_for_upgrade <= 0.01:
self._max_tolerable_delay_for_upgrade = 3.0
else: # by default (safe/conservative fallback) to 3000ms
self._max_tolerable_delay_for_upgrade = 3.0

65 changes: 64 additions & 1 deletion src/urllib3/connectionpool.py
@@ -806,15 +806,78 @@ def get_response(
f"This may occur if you expected the remote peer to support multiplexing but did not."
)

clean_exit = True

try:
with self.pool.borrow(
promise or ResponsePromise,
block=promise is not None,
not_idle_only=promise is None,
) as conn:
response = conn.getresponse(promise=promise, police_officer=self.pool)
try:
response = conn.getresponse(
promise=promise, police_officer=self.pool
)
except (BaseSSLError, OSError) as e:
if promise is not None:
url = typing.cast(str, promise.get_parameter("url"))
else:
url = ""
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
except UnavailableTraffic:
return None
except (
TimeoutError,
OSError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
ProxyError,
RecoverableError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
new_e: Exception = e
if isinstance(e, (BaseSSLError, CertificateError)):
new_e = SSLError(e)
if isinstance(
new_e,
(
OSError,
NewConnectionError,
TimeoutError,
SSLError,
),
) and (conn and conn.proxy and not conn.has_connected_to_proxy):
new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
elif isinstance(new_e, OSError):
new_e = ProtocolError("Connection aborted.", new_e)

if promise is not None:
retries = typing.cast(Retry, promise.get_parameter("retries"))

method = typing.cast(str, promise.get_parameter("method"))
url = typing.cast(str, promise.get_parameter("url"))

retries = retries.increment(
method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
)
retries.sleep()
else:
raise e

# Keep track of the error for the retry warning.
err = e

if not clean_exit:
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)

return self.get_response(promise=promise)

if promise is not None and response is None:
raise ValueError(
2 changes: 1 addition & 1 deletion src/urllib3/contrib/resolver/_async/factories.py
@@ -220,7 +220,7 @@ def from_url(url: str) -> AsyncResolverDescription:
value if value_converted is None else value_converted
)

host_patterns = []
host_patterns: list[str] = []

if "hosts" in kwargs:
host_patterns = (
2 changes: 1 addition & 1 deletion src/urllib3/contrib/resolver/factories.py
@@ -242,7 +242,7 @@ def from_url(url: str) -> ResolverDescription:
value if value_converted is None else value_converted
)

host_patterns = []
host_patterns: list[str] = []

if "hosts" in kwargs:
host_patterns = (
9 changes: 5 additions & 4 deletions test/test_response.py
@@ -122,7 +122,8 @@ def test_cache_content_preload_false(self) -> None:

assert not r._body
assert r.data == b"foo"
assert r._body == b"foo"
# mypy bug?
assert r._body == b"foo" # type: ignore[comparison-overlap]
assert r.data == b"foo"

def test_default(self) -> None:
@@ -521,7 +522,7 @@ def test_io_not_autoclose_bufferedreader(self) -> None:
def test_io_textiowrapper(self) -> None:
fp = BytesIO(b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f")
resp = HTTPResponse(fp, preload_content=False)
br = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type]
br = TextIOWrapper(resp, encoding="utf8") # type: ignore[type-var]

assert br.read() == "äöüß"

@@ -535,14 +536,14 @@ def test_io_textiowrapper(self) -> None:
)
resp = HTTPResponse(fp, preload_content=False)
with pytest.raises(ValueError, match="I/O operation on closed file.?"):
list(TextIOWrapper(resp)) # type: ignore[arg-type]
list(TextIOWrapper(resp)) # type: ignore[type-var]

def test_io_not_autoclose_textiowrapper(self) -> None:
fp = BytesIO(
b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\n\xce\xb1\xce\xb2\xce\xb3\xce\xb4"
)
resp = HTTPResponse(fp, preload_content=False, auto_close=False)
reader = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type]
reader = TextIOWrapper(resp, encoding="utf8") # type: ignore[type-var]
assert list(reader) == ["äöüß\n", "αβγδ"]

assert not reader.closed