10 changes: 10 additions & 0 deletions CHANGES/11105.bugfix.rst
@@ -0,0 +1,10 @@
Fixed an issue where cookies with duplicate names but different domains or paths
were lost when updating the cookie jar. The :class:`~aiohttp.ClientSession`
cookie jar now correctly stores all cookies even if they have the same name but
different domain or path, following the :rfc:`6265#section-5.3` storage model -- by :user:`bdraco`.

Note that :attr:`ClientResponse.cookies <aiohttp.ClientResponse.cookies>` returns
a :class:`~http.cookies.SimpleCookie` which uses the cookie name as a key, so
only the last cookie with each name is accessible via this interface. All cookies
can be accessed via :meth:`ClientResponse.headers.getall('Set-Cookie')
<multidict.MultiDictProxy.getall>` if needed.
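
As a quick illustration of the distinction above (a sketch only; the URL and cookie name are assumptions), the raw headers remain available on the response even though ``resp.cookies`` keeps just one morsel per name::

    import asyncio

    import aiohttp


    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # Assume the server replies with two Set-Cookie headers that share
            # a name, e.g. "token=a; Path=/" and "token=b; Path=/admin".
            async with session.get("https://example.com/login") as resp:
                # SimpleCookie is keyed by name, so only the last "token" is here.
                print(resp.cookies.get("token"))
                # Every raw Set-Cookie header is still exposed by the multidict.
                for header in resp.headers.getall("Set-Cookie", ()):
                    print(header)


    asyncio.run(main())
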
1 change: 1 addition & 0 deletions CHANGES/11106.bugfix.rst
1 change: 1 addition & 0 deletions CHANGES/11107.misc.rst
@@ -0,0 +1 @@
Avoided creating closed futures in ``ResponseHandler`` that will never be awaited -- by :user:`bdraco`.
1 change: 1 addition & 0 deletions CHANGES/4486.bugfix.rst
29 changes: 28 additions & 1 deletion aiohttp/abc.py
@@ -2,7 +2,7 @@
import socket
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel
from http.cookies import BaseCookie, CookieError, Morsel, SimpleCookie
from typing import (
TYPE_CHECKING,
Any,
@@ -13,6 +13,7 @@
Iterable,
List,
Optional,
Sequence,
Tuple,
TypedDict,
Union,
@@ -21,6 +22,7 @@
from multidict import CIMultiDict
from yarl import URL

from .log import client_logger
from .typedefs import LooseCookies

if TYPE_CHECKING:
@@ -188,6 +190,31 @@ def clear_domain(self, domain: str) -> None:
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""

def update_cookies_from_headers(
self, headers: Sequence[str], response_url: URL
) -> None:
"""
Update cookies from raw Set-Cookie headers.

Default implementation parses each header separately to preserve
cookies with the same name but different domain/path.
"""
# Default implementation for backward compatibility
cookies_to_update: List[Tuple[str, Morsel[str]]] = []
for cookie_header in headers:
tmp_cookie = SimpleCookie()
try:
tmp_cookie.load(cookie_header)
# Collect all cookies as tuples (name, morsel)
for name, morsel in tmp_cookie.items():
cookies_to_update.append((name, morsel))
except CookieError as exc:
client_logger.warning("Can not load response cookies: %s", exc)

# Update all cookies at once for efficiency
if cookies_to_update:
self.update_cookies(cookies_to_update, response_url)

@abstractmethod
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
"""Return the jar's cookies filtered by their attributes."""
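For context on why the default implementation above parses each header into its own ``SimpleCookie``: loading several ``Set-Cookie`` values into a single ``SimpleCookie`` keys them by name and silently drops duplicates. A standard-library-only sketch of both behaviours::

    from http.cookies import SimpleCookie

    headers = [
        "token=a; Path=/; Domain=example.com",
        "token=b; Path=/admin; Domain=example.com",
    ]

    # One shared SimpleCookie: the second "token" overwrites the first.
    merged = SimpleCookie()
    for header in headers:
        merged.load(header)
    assert len(merged) == 1

    # One SimpleCookie per header, as update_cookies_from_headers() does:
    # both (name, morsel) pairs survive.
    pairs = []
    for header in headers:
        tmp = SimpleCookie()
        tmp.load(header)
        pairs.extend(tmp.items())
    assert len(pairs) == 2
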
7 changes: 5 additions & 2 deletions aiohttp/client.py
@@ -722,8 +722,11 @@ async def _connect_and_send_request(
raise
raise ClientOSError(*exc.args) from exc

if cookies := resp._cookies:
self._cookie_jar.update_cookies(cookies, resp.url)
# Update cookies from raw headers to preserve duplicates
if resp._raw_cookie_headers:
self._cookie_jar.update_cookies_from_headers(
resp._raw_cookie_headers, resp.url
)

# redirects
if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
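A hedged usage sketch of the effect of this change (the endpoint and cookie layout are assumptions): after the request, the session's jar retains every morsel, including those sharing a name, and iterating the jar shows them all::

    import asyncio

    import aiohttp


    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            # Assume this endpoint sets "token" twice with different Path values.
            async with session.get("https://example.com/login"):
                pass
            # Iterating the jar yields every stored Morsel, duplicates included.
            for morsel in session.cookie_jar:
                print(morsel.key, morsel["path"], morsel.value)


    asyncio.run(main())
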
56 changes: 38 additions & 18 deletions aiohttp/client_proto.py
@@ -46,7 +46,26 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:

self._timeout_ceil_threshold: Optional[float] = 5

self.closed: asyncio.Future[None] = self._loop.create_future()
self._closed: Union[None, asyncio.Future[None]] = None
self._connection_lost_called = False

@property
def closed(self) -> Union[None, asyncio.Future[None]]:
"""Future that is set when the connection is closed.

This property returns a Future that will be completed when the connection
is closed. The Future is created lazily on first access to avoid creating
futures that will never be awaited.

Returns:
- A Future[None] if the connection is still open or was closed after
this property was accessed
- None if connection_lost() was already called before this property
was ever accessed (indicating no one is waiting for the closure)
"""
if self._closed is None and not self._connection_lost_called:
self._closed = self._loop.create_future()
return self._closed

@property
def upgraded(self) -> bool:
@@ -80,30 +99,31 @@ def is_connected(self) -> bool:
return self.transport is not None and not self.transport.is_closing()

def connection_lost(self, exc: Optional[BaseException]) -> None:
self._connection_lost_called = True
self._drop_timeout()

original_connection_error = exc
reraised_exc = original_connection_error

connection_closed_cleanly = original_connection_error is None

if connection_closed_cleanly:
set_result(self.closed, None)
else:
assert original_connection_error is not None
set_exception(
self.closed,
ClientConnectionError(
f"Connection lost: {original_connection_error !s}",
),
original_connection_error,
)
# Mark the exception as retrieved to prevent
# "Future exception was never retrieved" warnings
# The exception is always passed on through
# other means, so this is safe
with suppress(Exception):
self.closed.exception()
if self._closed is not None:
# Complete the future only if someone is actually waiting on it.
# If self._closed is None, the closed property was never accessed,
# so there is nothing to resolve.
if connection_closed_cleanly:
set_result(self._closed, None)
else:
assert original_connection_error is not None
set_exception(
self._closed,
ClientConnectionError(
f"Connection lost: {original_connection_error !s}",
),
original_connection_error,
)

if self._payload_parser is not None:
with suppress(Exception): # FIXME: log this somehow?
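A minimal, self-contained sketch of the lazy-future pattern introduced above (not aiohttp code; the class and names are illustrative): the future is created only on first access, and ``connection_lost()`` completes it only if someone actually asked for it::

    from __future__ import annotations

    import asyncio
    from typing import Optional


    class LazyClosed:
        def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
            self._loop = loop
            self._closed: Optional[asyncio.Future[None]] = None
            self._connection_lost_called = False

        @property
        def closed(self) -> Optional[asyncio.Future[None]]:
            # Create the future only while the connection is still live; once
            # connection_lost() has run, there is nothing left to wait for.
            if self._closed is None and not self._connection_lost_called:
                self._closed = self._loop.create_future()
            return self._closed

        def connection_lost(self) -> None:
            self._connection_lost_called = True
            # Complete the future only if it was ever requested, so no
            # never-awaited future is created here.
            if self._closed is not None and not self._closed.done():
                self._closed.set_result(None)


    async def main() -> None:
        proto = LazyClosed(asyncio.get_running_loop())
        waiter = proto.closed           # created lazily on first access
        proto.connection_lost()
        await waiter                    # resolves immediately

        idle = LazyClosed(asyncio.get_running_loop())
        idle.connection_lost()
        assert idle.closed is None      # never accessed, so never created


    asyncio.run(main())
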
29 changes: 21 additions & 8 deletions aiohttp/client_reqrep.py
@@ -228,6 +228,7 @@ class ClientResponse(HeadersMixin):

_connection: Optional["Connection"] = None # current connection
_cookies: Optional[SimpleCookie] = None
_raw_cookie_headers: Optional[Tuple[str, ...]] = None
_continue: Optional["asyncio.Future[bool]"] = None
_source_traceback: Optional[traceback.StackSummary] = None
_session: Optional["ClientSession"] = None
@@ -309,12 +310,29 @@ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
@property
def cookies(self) -> SimpleCookie:
if self._cookies is None:
self._cookies = SimpleCookie()
if self._raw_cookie_headers is not None:
# Parse cookies for response.cookies (SimpleCookie for backward compatibility)
cookies = SimpleCookie()
for hdr in self._raw_cookie_headers:
try:
cookies.load(hdr)
except CookieError as exc:
client_logger.warning("Can not load response cookies: %s", exc)
self._cookies = cookies
else:
self._cookies = SimpleCookie()
return self._cookies

@cookies.setter
def cookies(self, cookies: SimpleCookie) -> None:
self._cookies = cookies
# Generate raw cookie headers from the SimpleCookie
if cookies:
self._raw_cookie_headers = tuple(
morsel.OutputString() for morsel in cookies.values()
)
else:
self._raw_cookie_headers = None

@reify
def url(self) -> URL:
Expand Down Expand Up @@ -474,13 +492,8 @@ async def start(self, connection: "Connection") -> "ClientResponse":

# cookies
if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
cookies = SimpleCookie()
for hdr in cookie_hdrs:
try:
cookies.load(hdr)
except CookieError as exc:
client_logger.warning("Can not load response cookies: %s", exc)
self._cookies = cookies
# Store raw cookie headers for CookieJar
self._raw_cookie_headers = tuple(cookie_hdrs)
return self

def _response_eof(self) -> None:
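For reference, the setter above round-trips through ``Morsel.OutputString()``, which renders a morsel back into the raw header value without the ``Set-Cookie:`` prefix; a small standard-library sketch::

    from http.cookies import SimpleCookie

    cookies = SimpleCookie()
    cookies["token"] = "abc"
    cookies["token"]["path"] = "/admin"

    raw_headers = tuple(morsel.OutputString() for morsel in cookies.values())
    print(raw_headers)  # ('token=abc; Path=/admin',)
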
8 changes: 5 additions & 3 deletions aiohttp/connector.py
@@ -462,13 +462,15 @@ def _close_immediately(self) -> List[Awaitable[object]]:
self._cleanup_closed_handle.cancel()

for data in self._conns.values():
for proto, t0 in data:
for proto, _ in data:
proto.close()
waiters.append(proto.closed)
if closed := proto.closed:
waiters.append(closed)

for proto in self._acquired:
proto.close()
waiters.append(proto.closed)
if closed := proto.closed:
waiters.append(closed)

# TODO (A.Yushovskiy, 24-May-2019) collect transp. closing futures
for transport in self._cleanup_closed_transports:
13 changes: 13 additions & 0 deletions docs/client_reference.rst
@@ -1483,6 +1483,19 @@ Response object
HTTP cookies of response (*Set-Cookie* HTTP header,
:class:`~http.cookies.SimpleCookie`).

.. note::

Since :class:`~http.cookies.SimpleCookie` uses cookie name as the
key, cookies with the same name but different domains or paths will
be overwritten. Only the last cookie with a given name will be
accessible via this attribute.

To access all cookies, including duplicates with the same name,
use :meth:`response.headers.getall('Set-Cookie') <multidict.MultiDictProxy.getall>`.

The session's cookie jar will correctly store all cookies, even if
they are not accessible via this attribute.

.. attribute:: headers

A case-insensitive multidict proxy with HTTP headers of