Skip to content

Commit 393dbc5

Browse files
nessita and jacobtylerwalls
authored and committed
[6.0.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE against the declared `Content-Length` header before reading. On the ASGI path, chunked requests carry no `Content-Length`, so the check evaluated to 0 and always passed regardless of the actual body size. This work adds a new check on the actual number of bytes consumed. Thanks to Superior for the report, and to Jake Howard and Jacob Walls for reviews. Backport of 953c238 from main.
1 parent 0910af6 commit 393dbc5

5 files changed

Lines changed: 219 additions & 9 deletions

File tree

django/http/request.py

Lines changed: 21 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,7 @@
11
import codecs
22
import copy
33
import operator
4+
import os
45
from io import BytesIO
56
from itertools import chain
67
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -385,15 +386,18 @@ def body(self):
385386
)
386387

387388
# Limit the maximum request data size that will be handled
388-
# in-memory.
389-
if (
390-
settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
391-
and int(self.META.get("CONTENT_LENGTH") or 0)
392-
> settings.DATA_UPLOAD_MAX_MEMORY_SIZE
393-
):
394-
raise RequestDataTooBig(
395-
"Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
396-
)
389+
# in-memory. Reject early when Content-Length is present and
390+
# already exceeds the limit, avoiding reading the body at all.
391+
self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
392+
393+
# Content-Length can be absent or understated (e.g.
394+
# `Transfer-Encoding: chunked` on ASGI), so for seekable
395+
# streams (e.g. SpooledTemporaryFile on ASGI), check the actual
396+
# buffered size before reading it all into memory.
397+
if self._stream.seekable():
398+
stream_size = self._stream.seek(0, os.SEEK_END)
399+
self._check_data_too_big(stream_size)
400+
self._stream.seek(0)
397401

398402
try:
399403
self._body = self.read()
@@ -404,6 +408,14 @@ def body(self):
404408
self._stream = BytesIO(self._body)
405409
return self._body
406410

411+
def _check_data_too_big(self, length):
412+
if (
413+
settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
414+
and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
415+
):
416+
msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
417+
raise RequestDataTooBig(msg)
418+
407419
def _mark_post_parse_error(self):
408420
self._post = QueryDict()
409421
self._files = MultiValueDict()

docs/releases/4.2.30.txt

Lines changed: 11 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
5656

5757
This issue has severity "moderate" according to the :ref:`Django security
5858
policy <security-disclosure>`.
59+
60+
CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
61+
=========================================================================================================
62+
63+
ASGI requests with a missing or understated ``Content-Length`` header could
64+
bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
65+
``HttpRequest.body``, potentially loading an unbounded request body into
66+
memory and causing service degradation.
67+
68+
This issue has severity "low" according to the :ref:`Django security policy
69+
<security-disclosure>`.

docs/releases/5.2.13.txt

Lines changed: 11 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
5656

5757
This issue has severity "moderate" according to the :ref:`Django security
5858
policy <security-disclosure>`.
59+
60+
CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
61+
=========================================================================================================
62+
63+
ASGI requests with a missing or understated ``Content-Length`` header could
64+
bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
65+
``HttpRequest.body``, potentially loading an unbounded request body into
66+
memory and causing service degradation.
67+
68+
This issue has severity "low" according to the :ref:`Django security policy
69+
<security-disclosure>`.

docs/releases/6.0.4.txt

Lines changed: 11 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -57,6 +57,17 @@ may trigger repeated memory copying, potentially degrading performance.
5757
This issue has severity "moderate" according to the :ref:`Django security
5858
policy <security-disclosure>`.
5959

60+
CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
61+
=========================================================================================================
62+
63+
ASGI requests with a missing or understated ``Content-Length`` header could
64+
bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
65+
``HttpRequest.body``, potentially loading an unbounded request body into
66+
memory and causing service degradation.
67+
68+
This issue has severity "low" according to the :ref:`Django security policy
69+
<security-disclosure>`.
70+
6071
Bugfixes
6172
========
6273

tests/asgi/tests.py

Lines changed: 165 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -3,6 +3,7 @@
33
import tempfile
44
import threading
55
import time
6+
from io import BytesIO
67
from pathlib import Path
78
from unittest.mock import patch
89

@@ -31,6 +32,7 @@
3132
from .urls import sync_waiter, test_filename
3233

3334
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
35+
TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
3436

3537

3638
class SignalHandler:
@@ -800,3 +802,166 @@ def test_multiple_cookie_headers_http2(self):
800802
request = ASGIRequest(scope, None)
801803
self.assertEqual(request.META["HTTP_COOKIE"], "a=abc; b=def; c=ghi")
802804
self.assertEqual(request.COOKIES, {"a": "abc", "b": "def", "c": "ghi"})
805+
806+
807+
class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
808+
809+
def make_request(
810+
self,
811+
body,
812+
content_type=b"application/octet-stream",
813+
content_length=None,
814+
stream=None,
815+
):
816+
scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
817+
scope["headers"] = [(b"content-type", content_type)]
818+
if content_length is not None:
819+
scope["headers"].append((b"content-length", str(content_length).encode()))
820+
return ASGIRequest(scope, stream if stream is not None else BytesIO(body))
821+
822+
def test_body_size_not_exceeded_without_content_length(self):
823+
body = b"x" * 5
824+
with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
825+
self.assertEqual(self.make_request(body).body, body)
826+
827+
def test_body_size_exceeded_without_content_length(self):
828+
request = self.make_request(b"x" * 10)
829+
with (
830+
self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
831+
self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
832+
):
833+
request.body
834+
835+
def test_body_size_check_fires_before_read(self):
836+
# The seekable size check rejects oversized bodies before reading
837+
# them into memory (i.e. before calling self.read()).
838+
class TrackingBytesIO(BytesIO):
839+
calls = []
840+
841+
def read(self, *args, **kwargs):
842+
self.calls.append((args, kwargs))
843+
return super().read(*args, **kwargs)
844+
845+
stream = TrackingBytesIO(b"x" * 10)
846+
request = self.make_request(b"x" * 10, stream=stream)
847+
with (
848+
self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
849+
self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
850+
):
851+
request.body
852+
853+
self.assertEqual(stream.calls, [])
854+
855+
def test_post_size_exceeded_without_content_length(self):
856+
request = self.make_request(
857+
b"a=" + b"x" * 10,
858+
content_type=b"application/x-www-form-urlencoded",
859+
)
860+
with (
861+
self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
862+
self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
863+
):
864+
request.POST
865+
866+
def test_no_limit(self):
867+
body = b"x" * 100
868+
with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
869+
self.assertEqual(self.make_request(body).body, body)
870+
871+
async def test_read_body_no_limit(self):
872+
chunks = [
873+
{"type": "http.request", "body": b"x" * 100, "more_body": True},
874+
{"type": "http.request", "body": b"x" * 100, "more_body": False},
875+
]
876+
877+
async def receive():
878+
return chunks.pop(0)
879+
880+
handler = ASGIHandler()
881+
with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
882+
body_file = await handler.read_body(receive)
883+
self.addCleanup(body_file.close)
884+
885+
body_file.seek(0)
886+
self.assertEqual(body_file.read(), b"x" * 200)
887+
888+
def test_non_multipart_body_size_enforced(self):
889+
# DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
890+
request = self.make_request(b"x" * 100)
891+
with (
892+
self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10),
893+
self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
894+
):
895+
request.body
896+
897+
def test_multipart_file_upload_not_limited_by_data_upload_max(self):
898+
# DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
899+
# upload whose total body exceeds the limit must still succeed.
900+
boundary = "testboundary"
901+
file_content = b"x" * 100
902+
body = (
903+
(
904+
f"--{boundary}\r\n"
905+
f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
906+
f"Content-Type: application/octet-stream\r\n"
907+
f"\r\n"
908+
).encode()
909+
+ file_content
910+
+ f"\r\n--{boundary}--\r\n".encode()
911+
)
912+
request = self.make_request(
913+
body,
914+
content_type=f"multipart/form-data; boundary={boundary}".encode(),
915+
content_length=len(body),
916+
)
917+
with self.settings(
918+
DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
919+
):
920+
files = request.FILES
921+
self.assertEqual(len(files), 1)
922+
uploaded = files["file"]
923+
self.addCleanup(uploaded.close)
924+
self.assertEqual(uploaded.read(), file_content)
925+
926+
async def test_read_body_buffers_all_chunks(self):
927+
# read_body() consumes all chunks regardless of
928+
# DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
929+
# HttpRequest.body is accessed.
930+
chunks = [
931+
{"type": "http.request", "body": b"x" * 10, "more_body": True},
932+
{"type": "http.request", "body": b"y" * 10, "more_body": True},
933+
{"type": "http.request", "body": b"z" * 10, "more_body": False},
934+
]
935+
936+
async def receive():
937+
return chunks.pop(0)
938+
939+
handler = ASGIHandler()
940+
with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
941+
body_file = await handler.read_body(receive)
942+
self.addCleanup(body_file.close)
943+
944+
self.assertEqual(len(chunks), 0) # All chunks were consumed.
945+
body_file.seek(0)
946+
self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
947+
948+
async def test_read_body_multipart_not_limited(self):
949+
# All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
950+
# multipart size enforcement happens inside MultiPartParser, not here.
951+
chunks = [
952+
{"type": "http.request", "body": b"x" * 10, "more_body": True},
953+
{"type": "http.request", "body": b"y" * 10, "more_body": True},
954+
{"type": "http.request", "body": b"z" * 10, "more_body": False},
955+
]
956+
957+
async def receive():
958+
return chunks.pop(0)
959+
960+
handler = ASGIHandler()
961+
with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
962+
body_file = await handler.read_body(receive)
963+
self.addCleanup(body_file.close)
964+
965+
self.assertEqual(len(chunks), 0) # All chunks were consumed.
966+
body_file.seek(0)
967+
self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

0 commit comments

Comments (0)