Skip to content

Commit ed4dfda

Browse files
nessita, jacobtylerwalls
authored and committed
[4.2.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE against the declared `Content-Length` header before reading. On the ASGI path, chunked requests carry no `Content-Length`, so the declared length defaulted to 0 and the check always passed regardless of the actual body size. This work adds a new check on the actual number of bytes consumed. Thanks to Superior for the report, and to Jake Howard and Jacob Walls for reviews. Backport of 953c238 from main.
1 parent f13c20f commit ed4dfda

3 files changed

Lines changed: 204 additions & 11 deletions

File tree

django/http/request.py

Lines changed: 35 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import codecs
22
import copy
3+
import os
34
from io import BytesIO
45
from itertools import chain
56
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -328,25 +329,49 @@ def body(self):
328329
"You cannot access body after reading from request's data stream"
329330
)
330331

331-
# Limit the maximum request data size that will be handled in-memory.
332-
if (
333-
settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
334-
and int(self.META.get("CONTENT_LENGTH") or 0)
335-
> settings.DATA_UPLOAD_MAX_MEMORY_SIZE
336-
):
337-
raise RequestDataTooBig(
338-
"Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
339-
)
332+
# Limit the maximum request data size that will be handled
333+
# in-memory. Reject early when Content-Length is present and
334+
# already exceeds the limit, avoiding reading the body at all.
335+
self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
336+
337+
# Content-Length can be absent or understated (e.g.
338+
# `Transfer-Encoding: chunked` on ASGI), so for seekable
339+
# streams (e.g. SpooledTemporaryFile on ASGI), check the actual
340+
# buffered size before reading it all into memory.
341+
if hasattr(self._stream, "seekable") and self._stream.seekable():
342+
stream_size = self._stream.seek(0, os.SEEK_END)
343+
self._check_data_too_big(stream_size)
344+
self._stream.seek(0)
345+
did_check = True
346+
else:
347+
did_check = False
340348

341349
try:
342-
self._body = self.read()
350+
if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and not did_check:
351+
# Read one byte past the limit to detect an oversize body
352+
# without loading it all into memory first.
353+
self._body = self.read(settings.DATA_UPLOAD_MAX_MEMORY_SIZE + 1)
354+
else:
355+
self._body = self.read()
343356
except OSError as e:
344357
raise UnreadablePostError(*e.args) from e
345358
finally:
346359
self._stream.close()
347360
self._stream = BytesIO(self._body)
361+
if not did_check:
362+
stream_size = self._stream.seek(0, os.SEEK_END)
363+
self._check_data_too_big(stream_size)
364+
self._stream.seek(0)
348365
return self._body
349366

367+
def _check_data_too_big(self, length):
    """Raise RequestDataTooBig if *length* exceeds DATA_UPLOAD_MAX_MEMORY_SIZE.

    Centralizes the size check used by ``body`` so it can be applied both
    to the declared Content-Length and to the actual number of bytes
    buffered/read. A setting value of ``None`` disables the check.
    """
    if (
        settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
        and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
    ):
        msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
        raise RequestDataTooBig(msg)
374+
350375
def _mark_post_parse_error(self):
351376
self._post = QueryDict()
352377
self._files = MultiValueDict()

docs/releases/4.2.30.txt

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
5656

5757
This issue has severity "moderate" according to the :ref:`Django security
5858
policy <security-disclosure>`.
59+
60+
CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
61+
=========================================================================================================
62+
63+
ASGI requests with a missing or understated ``Content-Length`` header could
64+
bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
65+
``HttpRequest.body``, potentially loading an unbounded request body into
66+
memory and causing service degradation.
67+
68+
This issue has severity "low" according to the :ref:`Django security policy
69+
<security-disclosure>`.

tests/asgi/tests.py

Lines changed: 158 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,14 @@
11
import asyncio
22
import sys
33
import threading
4+
from io import BytesIO
45
from pathlib import Path
56

67
from asgiref.testing import ApplicationCommunicator
78

89
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
9-
from django.core.asgi import get_asgi_application
10+
from django.core.asgi import ASGIHandler, get_asgi_application
11+
from django.core.exceptions import RequestDataTooBig
1012
from django.core.handlers.asgi import ASGIRequest
1113
from django.core.signals import request_finished, request_started
1214
from django.db import close_old_connections
@@ -22,6 +24,7 @@
2224
from .urls import sync_waiter, test_filename
2325

2426
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
27+
TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
2528

2629

2730
@override_settings(ROOT_URLCONF="asgi.urls")
@@ -356,3 +359,157 @@ async def test_concurrent_async_uses_multiple_thread_pools(self):
356359
self.assertEqual(len(sync_waiter.active_threads), 2)
357360

358361
sync_waiter.active_threads.clear()
362+
363+
364+
class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
    """Regression tests for CVE-2026-33034.

    Verify that DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on ASGI request
    bodies even when the Content-Length header is absent or understated
    (e.g. chunked transfer encoding).
    """

    def make_request(
        self,
        body,
        content_type=b"application/octet-stream",
        content_length=None,
        stream=None,
    ):
        # Build a minimal ASGI POST request. Content-Length is only added
        # when explicitly given, so the default mimics a chunked request.
        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
        scope["headers"] = [(b"content-type", content_type)]
        if content_length is not None:
            scope["headers"].append((b"content-length", str(content_length).encode()))
        return ASGIRequest(scope, stream if stream is not None else BytesIO(body))

    def test_body_size_not_exceeded_without_content_length(self):
        # A body exactly at the limit is accepted.
        body = b"x" * 5
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
            self.assertEqual(self.make_request(body).body, body)

    def test_body_size_exceeded_without_content_length(self):
        # Without Content-Length the actual buffered size is still checked.
        request = self.make_request(b"x" * 10)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.body

    def test_body_size_check_fires_before_read(self):
        # The seekable size check rejects oversized bodies before reading
        # them into memory (i.e. before calling self.read()).
        class TrackingBytesIO(BytesIO):
            calls = []

            def read(self, *args, **kwargs):
                self.calls.append((args, kwargs))
                return super().read(*args, **kwargs)

        stream = TrackingBytesIO(b"x" * 10)
        request = self.make_request(b"x" * 10, stream=stream)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.body

        self.assertEqual(stream.calls, [])

    def test_post_size_exceeded_without_content_length(self):
        # Accessing POST (which triggers body parsing) is limited too.
        request = self.make_request(
            b"a=" + b"x" * 10,
            content_type=b"application/x-www-form-urlencoded",
        )
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.POST

    def test_no_limit(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE=None disables the check entirely.
        body = b"x" * 100
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            self.assertEqual(self.make_request(body).body, body)

    async def test_read_body_no_limit(self):
        # read_body() buffers all chunks when no limit is configured.
        chunks = [
            {"type": "http.request", "body": b"x" * 100, "more_body": True},
            {"type": "http.request", "body": b"x" * 100, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 200)

    def test_non_multipart_body_size_enforced(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
        request = self.make_request(b"x" * 100)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10):
                request.body

    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
        # upload whose total body exceeds the limit must still succeed.
        boundary = "testboundary"
        file_content = b"x" * 100
        body = (
            (
                f"--{boundary}\r\n"
                f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
                f"Content-Type: application/octet-stream\r\n"
                f"\r\n"
            ).encode()
            + file_content
            + f"\r\n--{boundary}--\r\n".encode()
        )
        request = self.make_request(
            body,
            content_type=f"multipart/form-data; boundary={boundary}".encode(),
            content_length=len(body),
        )
        with self.settings(
            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
        ):
            files = request.FILES
        self.assertEqual(len(files), 1)
        uploaded = files["file"]
        self.addCleanup(uploaded.close)
        self.assertEqual(uploaded.read(), file_content)

    async def test_read_body_buffers_all_chunks(self):
        # read_body() consumes all chunks regardless of
        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
        # HttpRequest.body is accessed.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

    async def test_read_body_multipart_not_limited(self):
        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
        # multipart size enforcement happens inside MultiPartParser, not here.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

0 commit comments

Comments
 (0)