Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
IteratorByteStream: split large iterator chunks into 64KB pieces
  • Loading branch information
Treadgold committed Jan 31, 2026
commit fc2d503b4aa2dbda9180046fef63e9e8e7d04516
1 change: 1 addition & 0 deletions httpx/_content.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ def __iter__(self) -> Iterator[bytes]:
yield part[offset : offset + chunk_size]
offset += chunk_size


class AsyncIteratorByteStream(AsyncByteStream):
CHUNK_SIZE = 65_536

Expand Down
25 changes: 25 additions & 0 deletions tests/test_content.py
Original file line number Diff line number Diff line change
Expand Up @@ -516,3 +516,28 @@ def test_allow_nan_false():
ValueError, match="Out of range float values are not JSON compliant"
):
httpx.Response(200, json=data_with_inf)


def test_iterator_content_splits_large_chunks():
    """A sync byte iterator whose parts exceed 64 KB must be re-chunked.

    `httpx.Request` wraps a plain iterator in `IteratorByteStream`, which is
    expected to split any oversized part into pieces of at most
    `IteratorByteStream.CHUNK_SIZE` (64 KB) while preserving the byte content.

    NOTE: this exercises only the *sync* stream, so the test is a plain
    (non-async) function — no `anyio` marker is needed.
    """
    # Generator yielding a single large chunk (100 KB), larger than CHUNK_SIZE.
    large_chunk = b"a" * 102_400  # 100 KB

    def gen() -> typing.Iterator[bytes]:
        yield large_chunk

    # Pass generator to Request (internally uses IteratorByteStream).
    # Concrete method/URL literals: the request itself is never sent.
    request = httpx.Request("POST", "https://www.example.com", content=gen())

    # Cast to Iterable[bytes] to make mypy happy
    sync_stream: typing.Iterable[bytes] = request.stream  # type: ignore

    # Collect chunks
    chunks = list(sync_stream)

    # Each chunk must be <= 64 KB
    for chunk in chunks:
        assert len(chunk) <= 64 * 1024

    # Splitting must not lose or reorder bytes: total content matches original.
    assert b"".join(chunks) == large_chunk