|
2 | 2 | import sys |
3 | 3 | import threading |
4 | 4 | import time |
| 5 | +from io import BytesIO |
5 | 6 | from pathlib import Path |
6 | 7 |
|
7 | 8 | from asgiref.sync import sync_to_async |
|
29 | 30 | from .urls import sync_waiter, test_filename |
30 | 31 |
|
31 | 32 | TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static" |
| 33 | +TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." |
32 | 34 |
|
33 | 35 |
|
34 | 36 | class SignalHandler: |
@@ -696,3 +698,166 @@ async def test_streaming_disconnect(self): |
696 | 698 | # 'last\n' isn't sent. |
697 | 699 | with self.assertRaises(asyncio.TimeoutError): |
698 | 700 | await communicator.receive_output(timeout=0.2) |
| 701 | + |
| 702 | + |
class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
    """
    DATA_UPLOAD_MAX_MEMORY_SIZE enforcement for ASGI requests.

    The limit is enforced when ASGIRequest.body/POST is accessed;
    ASGIHandler.read_body() buffers all received chunks regardless of the
    setting (enforcement happens later, on body access).
    """

    def make_request(
        self,
        body,
        content_type=b"application/octet-stream",
        content_length=None,
        stream=None,
    ):
        """
        Build an ASGIRequest for a POST to "/".

        If stream is given it is used as the request's body stream (and
        `body` only informs the headers); otherwise `body` is wrapped in a
        BytesIO. A Content-Length header is added only when content_length
        is not None.
        """
        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
        scope["headers"] = [(b"content-type", content_type)]
        if content_length is not None:
            scope["headers"].append((b"content-length", str(content_length).encode()))
        return ASGIRequest(scope, stream if stream is not None else BytesIO(body))

    def test_body_size_not_exceeded_without_content_length(self):
        body = b"x" * 5
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
            self.assertEqual(self.make_request(body).body, body)

    def test_body_size_exceeded_without_content_length(self):
        request = self.make_request(b"x" * 10)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

    def test_body_size_check_fires_before_read(self):
        # The seekable size check rejects oversized bodies before reading
        # them into memory (i.e. before calling self.read()).
        class TrackingBytesIO(BytesIO):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)
                # Instance attribute, not a shared class-level list, so call
                # records can never leak between tracker instances.
                self.calls = []

            def read(self, *args, **kwargs):
                self.calls.append((args, kwargs))
                return super().read(*args, **kwargs)

        stream = TrackingBytesIO(b"x" * 10)
        request = self.make_request(b"x" * 10, stream=stream)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

        self.assertEqual(stream.calls, [])

    def test_post_size_exceeded_without_content_length(self):
        request = self.make_request(
            b"a=" + b"x" * 10,
            content_type=b"application/x-www-form-urlencoded",
        )
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.POST

    def test_no_limit(self):
        body = b"x" * 100
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            self.assertEqual(self.make_request(body).body, body)

    async def test_read_body_no_limit(self):
        chunks = [
            {"type": "http.request", "body": b"x" * 100, "more_body": True},
            {"type": "http.request", "body": b"x" * 100, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 200)

    def test_non_multipart_body_size_enforced(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
        request = self.make_request(b"x" * 100)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
        # upload whose total body exceeds the limit must still succeed.
        boundary = "testboundary"
        file_content = b"x" * 100
        body = (
            (
                f"--{boundary}\r\n"
                'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
                "Content-Type: application/octet-stream\r\n"
                "\r\n"
            ).encode()
            + file_content
            + f"\r\n--{boundary}--\r\n".encode()
        )
        request = self.make_request(
            body,
            content_type=f"multipart/form-data; boundary={boundary}".encode(),
            content_length=len(body),
        )
        with self.settings(
            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
        ):
            files = request.FILES
        self.assertEqual(len(files), 1)
        uploaded = files["file"]
        self.addCleanup(uploaded.close)
        self.assertEqual(uploaded.read(), file_content)

    async def test_read_body_buffers_all_chunks(self):
        # read_body() consumes all chunks regardless of
        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
        # HttpRequest.body is accessed.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

    async def test_read_body_multipart_not_limited(self):
        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
        # multipart size enforcement happens inside MultiPartParser, not here.
        chunks = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return chunks.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
0 commit comments