|
3 | 3 | import tempfile |
4 | 4 | import threading |
5 | 5 | import time |
| 6 | +from io import BytesIO |
6 | 7 | from pathlib import Path |
7 | 8 | from unittest.mock import patch |
8 | 9 |
|
|
31 | 32 | from .urls import sync_waiter, test_filename |
32 | 33 |
|
33 | 34 | TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static" |
| 35 | +TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." |
34 | 36 |
|
35 | 37 |
|
36 | 38 | class SignalHandler: |
@@ -800,3 +802,166 @@ def test_multiple_cookie_headers_http2(self): |
800 | 802 | request = ASGIRequest(scope, None) |
801 | 803 | self.assertEqual(request.META["HTTP_COOKIE"], "a=abc; b=def; c=ghi") |
802 | 804 | self.assertEqual(request.COOKIES, {"a": "abc", "b": "def", "c": "ghi"}) |
| 805 | + |
| 806 | + |
class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
    """DATA_UPLOAD_MAX_MEMORY_SIZE handling for ASGI requests.

    Covers enforcement when ``request.body``/``POST``/``FILES`` is accessed,
    and the behavior of ``ASGIHandler.read_body()``, which buffers every
    chunk up front and defers size enforcement to later body access.
    """

    def make_request(
        self,
        body,
        content_type=b"application/octet-stream",
        content_length=None,
        stream=None,
    ):
        """Return an ASGIRequest for a POST carrying the given raw body.

        A caller-supplied *stream* is used as the body stream when given
        (so tests can observe reads); otherwise *body* is wrapped in a
        BytesIO.
        """
        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
        headers = [(b"content-type", content_type)]
        if content_length is not None:
            headers.append((b"content-length", str(content_length).encode()))
        scope["headers"] = headers
        body_stream = BytesIO(body) if stream is None else stream
        return ASGIRequest(scope, body_stream)

    def test_body_size_not_exceeded_without_content_length(self):
        payload = b"x" * 5
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
            self.assertEqual(self.make_request(payload).body, payload)

    def test_body_size_exceeded_without_content_length(self):
        request = self.make_request(b"x" * 10)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

    def test_body_size_check_fires_before_read(self):
        # The seekable size check must reject an oversized body before any
        # bytes are read into memory (i.e. before self.read() is called).
        class RecordingBytesIO(BytesIO):
            calls = []  # shared record of read() invocations

            def read(self, *args, **kwargs):
                self.calls.append((args, kwargs))
                return super().read(*args, **kwargs)

        stream = RecordingBytesIO(b"x" * 10)
        request = self.make_request(b"x" * 10, stream=stream)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

        self.assertEqual(stream.calls, [])

    def test_post_size_exceeded_without_content_length(self):
        request = self.make_request(
            b"a=" + b"x" * 10,
            content_type=b"application/x-www-form-urlencoded",
        )
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.POST

    def test_no_limit(self):
        payload = b"x" * 100
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            self.assertEqual(self.make_request(payload).body, payload)

    async def test_read_body_no_limit(self):
        messages = [
            {"type": "http.request", "body": b"x" * 100, "more_body": True},
            {"type": "http.request", "body": b"x" * 100, "more_body": False},
        ]

        async def receive():
            return messages.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 200)

    def test_non_multipart_body_size_enforced(self):
        # Non-multipart bodies are subject to DATA_UPLOAD_MAX_MEMORY_SIZE.
        request = self.make_request(b"x" * 100)
        with (
            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10),
            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
        ):
            request.body

    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
        # DATA_UPLOAD_MAX_MEMORY_SIZE limits non-file fields only; a file
        # upload whose total body exceeds the limit must still succeed.
        boundary = "testboundary"
        file_content = b"x" * 100
        preamble = (
            f"--{boundary}\r\n"
            'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
            "Content-Type: application/octet-stream\r\n"
            "\r\n"
        ).encode()
        body = preamble + file_content + f"\r\n--{boundary}--\r\n".encode()
        request = self.make_request(
            body,
            content_type=f"multipart/form-data; boundary={boundary}".encode(),
            content_length=len(body),
        )
        with self.settings(
            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
        ):
            files = request.FILES
        self.assertEqual(len(files), 1)
        uploaded = files["file"]
        self.addCleanup(uploaded.close)
        self.assertEqual(uploaded.read(), file_content)

    async def test_read_body_buffers_all_chunks(self):
        # read_body() consumes every chunk regardless of
        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later, when
        # HttpRequest.body is accessed.
        messages = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return messages.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(messages, [])  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)

    async def test_read_body_multipart_not_limited(self):
        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
        # multipart size enforcement happens inside MultiPartParser, not here.
        # NOTE(review): setup is identical to test_read_body_buffers_all_chunks
        # and no multipart payload is involved — consider exercising an actual
        # multipart body here, or folding this into the test above.
        messages = [
            {"type": "http.request", "body": b"x" * 10, "more_body": True},
            {"type": "http.request", "body": b"y" * 10, "more_body": True},
            {"type": "http.request", "body": b"z" * 10, "more_body": False},
        ]

        async def receive():
            return messages.pop(0)

        handler = ASGIHandler()
        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
            body_file = await handler.read_body(receive)
        self.addCleanup(body_file.close)

        self.assertEqual(messages, [])  # All chunks were consumed.
        body_file.seek(0)
        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
0 commit comments