
Commit 49e1e2b

nessita authored and jacobtylerwalls committed
[5.2.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE against the declared `Content-Length` header before reading. On the ASGI path, chunked requests carry no `Content-Length`, so the declared size evaluated to 0 and the check always passed, regardless of the actual body size. This work adds a new check on the actual number of bytes consumed.

Thanks to Superior for the report, and to Jake Howard and Jacob Walls for reviews.

Backport of 953c238 from main.
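To illustrate the failure mode described above (a minimal sketch, not code from this commit): with no Content-Length header, the declared size falls back to 0, so the pre-fix guard could never fire, whatever the real body size.

# Hypothetical sketch of the pre-fix guard's blind spot. A chunked ASGI
# request has no "CONTENT_LENGTH" key in request.META, so the declared
# size is 0 and the comparison against the limit always passes.
META = {}  # chunked request: no "CONTENT_LENGTH"
DATA_UPLOAD_MAX_MEMORY_SIZE = 2_621_440  # Django's default limit (2.5 MB)

declared = int(META.get("CONTENT_LENGTH") or 0)  # -> 0
assert declared <= DATA_UPLOAD_MAX_MEMORY_SIZE  # guard never raises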
1 parent 0b46789 commit 49e1e2b

4 files changed: 222 additions & 10 deletions


django/http/request.py

Lines changed: 35 additions & 10 deletions
@@ -1,6 +1,7 @@
 import codecs
 import copy
 import operator
+import os
 from io import BytesIO
 from itertools import chain
 from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -380,25 +381,49 @@ def body(self):
                     "You cannot access body after reading from request's data stream"
                 )

-            # Limit the maximum request data size that will be handled in-memory.
-            if (
-                settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
-                and int(self.META.get("CONTENT_LENGTH") or 0)
-                > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
-            ):
-                raise RequestDataTooBig(
-                    "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
-                )
+            # Limit the maximum request data size that will be handled
+            # in-memory. Reject early when Content-Length is present and
+            # already exceeds the limit, avoiding reading the body at all.
+            self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
+
+            # Content-Length can be absent or understated (e.g.
+            # `Transfer-Encoding: chunked` on ASGI), so for seekable
+            # streams (e.g. SpooledTemporaryFile on ASGI), check the actual
+            # buffered size before reading it all into memory.
+            if hasattr(self._stream, "seekable") and self._stream.seekable():
+                stream_size = self._stream.seek(0, os.SEEK_END)
+                self._check_data_too_big(stream_size)
+                self._stream.seek(0)
+                did_check = True
+            else:
+                did_check = False

             try:
-                self._body = self.read()
+                if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and not did_check:
+                    # Read one byte past the limit to detect an oversize body
+                    # without loading it all into memory first.
+                    self._body = self.read(settings.DATA_UPLOAD_MAX_MEMORY_SIZE + 1)
+                else:
+                    self._body = self.read()
             except OSError as e:
                 raise UnreadablePostError(*e.args) from e
             finally:
                 self._stream.close()
             self._stream = BytesIO(self._body)
+            if not did_check:
+                stream_size = self._stream.seek(0, os.SEEK_END)
+                self._check_data_too_big(stream_size)
+                self._stream.seek(0)
         return self._body

+    def _check_data_too_big(self, length):
+        if (
+            settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
+            and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
+        ):
+            msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
+            raise RequestDataTooBig(msg)
+
     def _mark_post_parse_error(self):
         self._post = QueryDict()
         self._files = MultiValueDict()
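The non-seekable fallback above uses a capped read: asking for one byte more than the limit distinguishes "fits within the limit" from "too big" without buffering an unbounded stream. A standalone sketch of the same idea, assuming only a file-like stream and an integer limit (the names here are illustrative, not Django API):

from io import BytesIO

def read_capped(stream, limit):
    # Request limit + 1 bytes: a result longer than the limit proves the
    # stream holds more data than allowed, without reading all of it.
    data = stream.read(limit + 1)
    if len(data) > limit:
        raise ValueError("body exceeds limit")
    return data

assert read_capped(BytesIO(b"x" * 5), 5) == b"x" * 5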

docs/releases/4.2.30.txt

Lines changed: 11 additions & 0 deletions
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.

 This issue has severity "moderate" according to the :ref:`Django security
 policy <security-disclosure>`.
+
+CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
+=========================================================================================================
+
+ASGI requests with a missing or understated ``Content-Length`` header could
+bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
+``HttpRequest.body``, potentially loading an unbounded request body into
+memory and causing service degradation.
+
+This issue has severity "low" according to the :ref:`Django security policy
+<security-disclosure>`.
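For reference, the limit named in these release notes is an ordinary project setting; an illustrative settings.py entry (2,621,440 bytes, i.e. 2.5 MB, is Django's documented default):

# settings.py -- cap the request body size Django will hold in memory.
# None disables the check entirely; the default is 2.5 MB.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2_621_440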

docs/releases/5.2.13.txt

Lines changed: 11 additions & 0 deletions
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.

 This issue has severity "moderate" according to the :ref:`Django security
 policy <security-disclosure>`.
+
+CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
+=========================================================================================================
+
+ASGI requests with a missing or understated ``Content-Length`` header could
+bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
+``HttpRequest.body``, potentially loading an unbounded request body into
+memory and causing service degradation.
+
+This issue has severity "low" according to the :ref:`Django security policy
+<security-disclosure>`.

tests/asgi/tests.py

Lines changed: 165 additions & 0 deletions
@@ -2,6 +2,7 @@
 import sys
 import threading
 import time
+from io import BytesIO
 from pathlib import Path

 from asgiref.sync import sync_to_async
@@ -29,6 +30,7 @@
 from .urls import sync_waiter, test_filename

 TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
+TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."


 class SignalHandler:
@@ -696,3 +698,166 @@ async def test_streaming_disconnect(self):
         # 'last\n' isn't sent.
         with self.assertRaises(asyncio.TimeoutError):
             await communicator.receive_output(timeout=0.2)
+
+
+class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
+
+    def make_request(
+        self,
+        body,
+        content_type=b"application/octet-stream",
+        content_length=None,
+        stream=None,
+    ):
+        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
+        scope["headers"] = [(b"content-type", content_type)]
+        if content_length is not None:
+            scope["headers"].append((b"content-length", str(content_length).encode()))
+        return ASGIRequest(scope, stream if stream is not None else BytesIO(body))
+
+    def test_body_size_not_exceeded_without_content_length(self):
+        body = b"x" * 5
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+            self.assertEqual(self.make_request(body).body, body)
+
+    def test_body_size_exceeded_without_content_length(self):
+        request = self.make_request(b"x" * 10)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+    def test_body_size_check_fires_before_read(self):
+        # The seekable size check rejects oversized bodies before reading
+        # them into memory (i.e. before calling self.read()).
+        class TrackingBytesIO(BytesIO):
+            calls = []
+
+            def read(self, *args, **kwargs):
+                self.calls.append((args, kwargs))
+                return super().read(*args, **kwargs)
+
+        stream = TrackingBytesIO(b"x" * 10)
+        request = self.make_request(b"x" * 10, stream=stream)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+        self.assertEqual(stream.calls, [])
+
+    def test_post_size_exceeded_without_content_length(self):
+        request = self.make_request(
+            b"a=" + b"x" * 10,
+            content_type=b"application/x-www-form-urlencoded",
+        )
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.POST
+
+    def test_no_limit(self):
+        body = b"x" * 100
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+            self.assertEqual(self.make_request(body).body, body)
+
+    async def test_read_body_no_limit(self):
+        chunks = [
+            {"type": "http.request", "body": b"x" * 100, "more_body": True},
+            {"type": "http.request", "body": b"x" * 100, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+            body_file = await handler.read_body(receive)
+        self.addCleanup(body_file.close)
+
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 200)
+
+    def test_non_multipart_body_size_enforced(self):
+        # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
+        request = self.make_request(b"x" * 100)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
+        # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
+        # upload whose total body exceeds the limit must still succeed.
+        boundary = "testboundary"
+        file_content = b"x" * 100
+        body = (
+            (
+                f"--{boundary}\r\n"
+                f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
+                f"Content-Type: application/octet-stream\r\n"
+                f"\r\n"
+            ).encode()
+            + file_content
+            + f"\r\n--{boundary}--\r\n".encode()
+        )
+        request = self.make_request(
+            body,
+            content_type=f"multipart/form-data; boundary={boundary}".encode(),
+            content_length=len(body),
+        )
+        with self.settings(
+            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
+        ):
+            files = request.FILES
+        self.assertEqual(len(files), 1)
+        uploaded = files["file"]
+        self.addCleanup(uploaded.close)
+        self.assertEqual(uploaded.read(), file_content)
+
+    async def test_read_body_buffers_all_chunks(self):
+        # read_body() consumes all chunks regardless of
+        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
+        # HttpRequest.body is accessed.
+        chunks = [
+            {"type": "http.request", "body": b"x" * 10, "more_body": True},
+            {"type": "http.request", "body": b"y" * 10, "more_body": True},
+            {"type": "http.request", "body": b"z" * 10, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+            body_file = await handler.read_body(receive)
+        self.addCleanup(body_file.close)
+
+        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
+
+    async def test_read_body_multipart_not_limited(self):
+        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
+        # multipart size enforcement happens inside MultiPartParser, not here.
+        chunks = [
+            {"type": "http.request", "body": b"x" * 10, "more_body": True},
+            {"type": "http.request", "body": b"y" * 10, "more_body": True},
+            {"type": "http.request", "body": b"z" * 10, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+            body_file = await handler.read_body(receive)
+        self.addCleanup(body_file.close)
+
+        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
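Assuming a Django source checkout, these tests can presumably be run with the project's own test helper, e.g. `python tests/runtests.py asgi`.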
