|
1 | 1 | import asyncio |
2 | 2 | import sys |
3 | 3 | import threading |
| 4 | +from io import BytesIO |
4 | 5 | from pathlib import Path |
5 | 6 |
|
6 | 7 | from asgiref.testing import ApplicationCommunicator |
7 | 8 |
|
8 | 9 | from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler |
9 | | -from django.core.asgi import get_asgi_application |
| 10 | +from django.core.asgi import ASGIHandler, get_asgi_application |
| 11 | +from django.core.exceptions import RequestDataTooBig |
10 | 12 | from django.core.handlers.asgi import ASGIRequest |
11 | 13 | from django.core.signals import request_finished, request_started |
12 | 14 | from django.db import close_old_connections |
|
22 | 24 | from .urls import sync_waiter, test_filename |
23 | 25 |
|
24 | 26 | TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static" |
| 27 | +TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." |
25 | 28 |
|
26 | 29 |
|
27 | 30 | @override_settings(ROOT_URLCONF="asgi.urls") |
@@ -356,3 +359,157 @@ async def test_concurrent_async_uses_multiple_thread_pools(self): |
356 | 359 | self.assertEqual(len(sync_waiter.active_threads), 2) |
357 | 360 |
|
358 | 361 | sync_waiter.active_threads.clear() |
| 362 | + |
| 363 | + |
class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
    """DATA_UPLOAD_MAX_MEMORY_SIZE enforcement for ASGI requests.

    The limit is enforced lazily when the body is accessed
    (HttpRequest.body / request.POST), not while ASGIHandler.read_body()
    buffers the incoming ASGI chunks.
    """

    def make_request(
        self,
        body,
        content_type=b"application/octet-stream",
        content_length=None,
        stream=None,
    ):
        """Return an ASGIRequest for a POST to "/" with the given body.

        If ``stream`` is provided, it is used as the request's body
        stream instead of wrapping ``body`` in a fresh BytesIO.
        """
        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
        scope["headers"] = [(b"content-type", content_type)]
        if content_length is not None:
            scope["headers"].append((b"content-length", str(content_length).encode()))
        return ASGIRequest(scope, stream if stream is not None else BytesIO(body))

    async def _read_body(self, chunks, max_size):
        # Helper: feed ``chunks`` through ASGIHandler.read_body() under the
        # given DATA_UPLOAD_MAX_MEMORY_SIZE and return the buffered file
        # (closed automatically at test teardown).
        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=max_size):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)
        return body_file

    def test_body_size_not_exceeded_without_content_length(self):
        body = b"x" * 5
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
            self.assertEqual(self.make_request(body).body, body)

    def test_body_size_exceeded_without_content_length(self):
        request = self.make_request(b"x" * 10)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.body

    def test_body_size_check_fires_before_read(self):
        # The seekable size check rejects oversized bodies before reading
        # them into memory (i.e. before calling self.read()).
        class TrackingBytesIO(BytesIO):
            # Track read() calls per instance (an instance attribute, not a
            # shared mutable class attribute).
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)
                self.calls = []

            def read(self, *args, **kwargs):
                self.calls.append((args, kwargs))
                return super().read(*args, **kwargs)

        stream = TrackingBytesIO(b"x" * 10)
        request = self.make_request(b"x" * 10, stream=stream)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.body

        self.assertEqual(stream.calls, [])

    def test_post_size_exceeded_without_content_length(self):
        request = self.make_request(
            b"a=" + b"x" * 10,
            content_type=b"application/x-www-form-urlencoded",
        )
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
                request.POST

    def test_no_limit(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE=None disables the check entirely.
        body = b"x" * 100
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            self.assertEqual(self.make_request(body).body, body)

    async def test_read_body_no_limit(self):
        chunks = [
            {"type": "http.request", "body": b"x" * 100, "more_body": True},
            {"type": "http.request", "body": b"x" * 100, "more_body": False},
        ]
        body_file = await self._read_body(chunks, max_size=None)
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 200)

    def test_non_multipart_body_size_enforced(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
        request = self.make_request(b"x" * 100)
        with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
            with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10):
                request.body

    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
        # upload whose total body exceeds the limit must still succeed.
        boundary = "testboundary"
        file_content = b"x" * 100
        body = (
            (
                f"--{boundary}\r\n"
                'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
                "Content-Type: application/octet-stream\r\n"
                "\r\n"
            ).encode()
            + file_content
            + f"\r\n--{boundary}--\r\n".encode()
        )
        request = self.make_request(
            body,
            content_type=f"multipart/form-data; boundary={boundary}".encode(),
            content_length=len(body),
        )
        with self.settings(
            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
        ):
            files = request.FILES
        self.assertEqual(len(files), 1)
        uploaded = files["file"]
        self.addCleanup(uploaded.close)
        self.assertEqual(uploaded.read(), file_content)

    async def test_read_body_buffers_all_chunks(self):
        # read_body() consumes all chunks regardless of
        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
        # HttpRequest.body is accessed.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]
        body_file = await self._read_body(chunks, max_size=15)
        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

    async def test_read_body_multipart_not_limited(self):
        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
        # multipart size enforcement happens inside MultiPartParser, not here.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]
        body_file = await self._read_body(chunks, max_size=15)
        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
0 commit comments