Commit

Fix CachedResponse.read() and ClientResponse.read() consistency (#279)

* fix: make 'read()' consistent with 'ClientResponse.read()'
* test: extend test_streaming_requests
alessio-locatelli authored Nov 7, 2024
1 parent b9a4b2a commit bd0bc75
Showing 3 changed files with 31 additions and 18 deletions.
1 change: 1 addition & 0 deletions HISTORY.md
@@ -2,6 +2,7 @@

## (unreleased)

- Fixed `CachedResponse.read()` to be consistent with `ClientResponse.read()` by allowing `read()` to be called multiple times. (#289)
- Now a warning is raised when a cache backend is accessed after disconnecting (after exiting the `CachedSession` context manager). (#241)

## 0.12.4 (2024-10-30)
6 changes: 4 additions & 2 deletions aiohttp_client_cache/response.py
@@ -69,7 +69,7 @@ class CachedResponse(HeadersMixin):
status: int = attr.ib()
url: URL = attr.ib(converter=URL)
version: str = attr.ib()
- _body: Any = attr.ib(default=b'')
+ _body: Any = attr.ib(default=None)
_content: StreamReader | None = attr.ib(default=None)
_links: LinkItems = attr.ib(factory=list)
cookies: SimpleCookie = attr.ib(factory=SimpleCookie)
@@ -215,7 +215,9 @@ def raise_for_status(self):

async def read(self) -> bytes:
"""Read response payload."""
- return await self.content.read()
+ # We are ready to return the body because `read()`
+ # was called on `CachedResponse` creation.
+ return self._body

def reset(self):
"""Reset the stream reader to re-read a streamed response"""
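
For context, a minimal usage sketch of the behaviour this change restores (not part of the commit; it assumes the public `CachedSession`/`SQLiteBackend` API and a hypothetical `demo_cache` name and URL): `read()` can now be called repeatedly on a cached response, just as on a plain `ClientResponse`.

```python
import asyncio

from aiohttp_client_cache import CachedSession, SQLiteBackend


async def main():
    async with CachedSession(cache=SQLiteBackend('demo_cache')) as session:
        # The first request hits the network; the repeat is served from the cache
        for _ in range(2):
            response = await session.get('https://example.com')
            first = await response.read()
            # Before this fix, a second read() on a cached response could come back empty
            second = await response.read()
            assert first == second


asyncio.run(main())
```
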
42 changes: 26 additions & 16 deletions test/integration/base_backend_test.py
@@ -5,6 +5,7 @@
import asyncio
import pickle
from contextlib import asynccontextmanager

from test.conftest import (
ALL_METHODS,
CACHE_NAME,
@@ -174,28 +175,37 @@ async def test_include_headers(self):
async def test_streaming_requests(self):
"""Test that streaming requests work both for the original and cached responses"""
async with self.init_session() as session:
- for _ in range(2):
- response = cast(CachedResponse, await session.get(httpbin('stream-bytes/64')))
+ for i in range(2):
+ any_response = await session.get(httpbin('stream-bytes/64'))
+
+ # Can read multiple times.
+ assert {len(await any_response.read()) for _ in range(3)} == {64}
+
+ if i == 0:
+ continue
+
+ response = cast(CachedResponse, any_response)
+
lines = [line async for line in response.content]
assert len(b''.join(lines)) == 64

- # Test some additional methods on the cached response
- response.reset()
- chunks = [c async for (c, _) in response.content.iter_chunks()]
- assert len(b''.join(chunks)) == 64
- response.reset()
+ # Test some additional methods on the cached response
+ response.reset()
+ chunks = [c async for (c, _) in response.content.iter_chunks()]
+ assert len(b''.join(chunks)) == 64
+ response.reset()

- chunks = [c async for c in response.content.iter_chunked(2)]
- assert len(b''.join(chunks)) == 64
- response.reset()
+ chunks = [c async for c in response.content.iter_chunked(2)]
+ assert len(b''.join(chunks)) == 64
+ response.reset()

- chunks = [c async for c in response.content.iter_any()]
- assert len(b''.join(chunks)) == 64
- response.reset()
+ chunks = [c async for c in response.content.iter_any()]
+ assert len(b''.join(chunks)) == 64
+ response.reset()

- # readany() should return empty bytes after being consumed
- assert len(await response.content.readany()) == 64
- assert await response.content.readany() == b''
+ # readany() should return empty bytes after being consumed
+ assert len(await response.content.readany()) == 64
+ assert await response.content.readany() == b''

async def test_streaming_request__ignored(self):
"""If a streaming request is filtered out (expire_after=0), its body should be readable as usual"""
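
As a companion to the extended test, a minimal sketch (not from the repository; the cache name and URL are placeholders, and it assumes the `read()`, `content`, and `reset()` behaviour exercised above) of consuming a cached streaming body more than once:

```python
import asyncio

from aiohttp_client_cache import CachedSession, SQLiteBackend


async def stream_twice(url: str) -> None:
    async with CachedSession(cache=SQLiteBackend('demo_cache')) as session:
        # Request the URL twice so the second response is served from the cache
        await (await session.get(url)).read()
        response = await session.get(url)

        # read() no longer exhausts the cached body, so repeated reads agree
        assert await response.read() == await response.read()

        # On a CachedResponse, reset() rewinds the stream reader so the body
        # can be iterated again (the behaviour the extended test checks)
        if hasattr(response, 'reset'):
            chunks = [chunk async for chunk in response.content.iter_chunked(16)]
            response.reset()
            chunks_again = [chunk async for chunk in response.content.iter_chunked(16)]
            assert b''.join(chunks) == b''.join(chunks_again)


asyncio.run(stream_twice('https://example.com'))
```
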
