| field | value | date |
|---|---|---|
| author | Natalia <124304+nessita@users.noreply.github.com> | 2026-03-11 10:26:18 -0300 |
| committer | Jacob Walls <jacobtylerwalls@gmail.com> | 2026-04-07 07:34:17 -0400 |
| commit | 49e1e2b548999a35a025f9682598946bda9e9921 | |
| tree | eba2c042d949a55df60f8d56602fbd321ac5cda1 | |
| parent | 0b467893bdde69a2d23034338e76021a1e4f4322 | |
[5.2.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks `DATA_UPLOAD_MAX_MEMORY_SIZE`
against the declared `Content-Length` header before reading. On the ASGI
path, chunked requests carry no `Content-Length`, so the declared length
fell back to 0 and the check always passed, regardless of the actual body
size.

This work adds a check against the actual number of bytes consumed, not
just the declared length (the pre-patch check is sketched below).
Thanks to Superior for the report, and to Jake Howard and Jacob Walls
for reviews.
Backport of 953c238058c0ce387a1a41cb491bfc1875d73ad0 from main.
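
To make the flaw concrete, here is a runnable sketch of the pre-patch check, condensed from the removed lines in the `django/http/request.py` diff below; `meta` and `max_size` are illustrative stand-ins for `self.META` and `settings.DATA_UPLOAD_MAX_MEMORY_SIZE`:

```python
def declared_length_check(meta, max_size):
    # Pre-patch behavior: trust only the declared Content-Length.
    # With no header (e.g. Transfer-Encoding: chunked on ASGI), the
    # declared size falls back to 0 and the limit can never trip.
    declared = int(meta.get("CONTENT_LENGTH") or 0)
    return max_size is not None and declared > max_size  # True means "reject"


print(declared_length_check({"CONTENT_LENGTH": "10"}, 5))  # True: rejected
print(declared_length_check({}, 5))  # False: a 10-byte chunked body slips through
```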
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | django/http/request.py | 45 |
| -rw-r--r-- | docs/releases/4.2.30.txt | 11 |
| -rw-r--r-- | docs/releases/5.2.13.txt | 11 |
| -rw-r--r-- | tests/asgi/tests.py | 165 |
4 files changed, 222 insertions, 10 deletions
```diff
diff --git a/django/http/request.py b/django/http/request.py
index daaf3748cc..7771f507d3 100644
--- a/django/http/request.py
+++ b/django/http/request.py
@@ -1,6 +1,7 @@
 import codecs
 import copy
 import operator
+import os
 from io import BytesIO
 from itertools import chain
 from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -380,25 +381,49 @@ class HttpRequest:
                 "You cannot access body after reading from request's data stream"
             )

-            # Limit the maximum request data size that will be handled in-memory.
-            if (
-                settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
-                and int(self.META.get("CONTENT_LENGTH") or 0)
-                > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
-            ):
-                raise RequestDataTooBig(
-                    "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
-                )
+            # Limit the maximum request data size that will be handled
+            # in-memory. Reject early when Content-Length is present and
+            # already exceeds the limit, avoiding reading the body at all.
+            self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
+
+            # Content-Length can be absent or understated (e.g.
+            # `Transfer-Encoding: chunked` on ASGI), so for seekable
+            # streams (e.g. SpooledTemporaryFile on ASGI), check the actual
+            # buffered size before reading it all into memory.
+            if hasattr(self._stream, "seekable") and self._stream.seekable():
+                stream_size = self._stream.seek(0, os.SEEK_END)
+                self._check_data_too_big(stream_size)
+                self._stream.seek(0)
+                did_check = True
+            else:
+                did_check = False

             try:
-                self._body = self.read()
+                if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and not did_check:
+                    # Read one byte past the limit to detect an oversize body
+                    # without loading it all into memory first.
+                    self._body = self.read(settings.DATA_UPLOAD_MAX_MEMORY_SIZE + 1)
+                else:
+                    self._body = self.read()
             except OSError as e:
                 raise UnreadablePostError(*e.args) from e
             finally:
                 self._stream.close()
             self._stream = BytesIO(self._body)
+            if not did_check:
+                stream_size = self._stream.seek(0, os.SEEK_END)
+                self._check_data_too_big(stream_size)
+                self._stream.seek(0)
         return self._body

+    def _check_data_too_big(self, length):
+        if (
+            settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
+            and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
+        ):
+            msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
+            raise RequestDataTooBig(msg)
+
     def _mark_post_parse_error(self):
         self._post = QueryDict()
         self._files = MultiValueDict()
diff --git a/docs/releases/4.2.30.txt b/docs/releases/4.2.30.txt
index c5058d9b84..8382907068 100644
--- a/docs/releases/4.2.30.txt
+++ b/docs/releases/4.2.30.txt
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
 
 This issue has severity "moderate" according to the :ref:`Django security
 policy <security-disclosure>`.
+
+CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
+=========================================================================================================
+
+ASGI requests with a missing or understated ``Content-Length`` header could
+bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
+``HttpRequest.body``, potentially loading an unbounded request body into
+memory and causing service degradation.
+
+This issue has severity "low" according to the :ref:`Django security policy
+<security-disclosure>`.
```
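For streams that are not seekable, the patch above falls back to a capped read: asking for at most one byte more than the limit is enough to tell an in-limit body from an oversize one while buffering at most `limit + 1` bytes. A minimal sketch of that pattern outside Django (`read_capped` is a made-up name for illustration):

```python
from io import BytesIO


def read_capped(stream, limit):
    # Reading limit + 1 bytes distinguishes "fits" from "too big"
    # without ever buffering the full oversize payload in memory.
    data = stream.read(limit + 1)
    if len(data) > limit:
        raise ValueError("body exceeded the in-memory limit")
    return data


print(read_capped(BytesIO(b"x" * 5), 5))  # b'xxxxx'
# read_capped(BytesIO(b"x" * 10), 5) raises ValueError.
```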
```diff
diff --git a/docs/releases/5.2.13.txt b/docs/releases/5.2.13.txt
index 46303da3c7..9b7ce3155a 100644
--- a/docs/releases/5.2.13.txt
+++ b/docs/releases/5.2.13.txt
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
 
 This issue has severity "moderate" according to the :ref:`Django security
 policy <security-disclosure>`.
+
+CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
+=========================================================================================================
+
+ASGI requests with a missing or understated ``Content-Length`` header could
+bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
+``HttpRequest.body``, potentially loading an unbounded request body into
+memory and causing service degradation.
+
+This issue has severity "low" according to the :ref:`Django security policy
+<security-disclosure>`.
diff --git a/tests/asgi/tests.py b/tests/asgi/tests.py
index 880eb0784d..d97022fc92 100644
--- a/tests/asgi/tests.py
+++ b/tests/asgi/tests.py
@@ -2,6 +2,7 @@ import asyncio
 import sys
 import threading
 import time
+from io import BytesIO
 from pathlib import Path
 
 from asgiref.sync import sync_to_async
@@ -29,6 +30,7 @@ from django.views.decorators.csrf import csrf_exempt
 from .urls import sync_waiter, test_filename
 
 TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
+TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
 
 
 class SignalHandler:
@@ -696,3 +698,166 @@ class ASGITest(SimpleTestCase):
         # 'last\n' isn't sent.
         with self.assertRaises(asyncio.TimeoutError):
             await communicator.receive_output(timeout=0.2)
+
+
+class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
+
+    def make_request(
+        self,
+        body,
+        content_type=b"application/octet-stream",
+        content_length=None,
+        stream=None,
+    ):
+        scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
+        scope["headers"] = [(b"content-type", content_type)]
+        if content_length is not None:
+            scope["headers"].append((b"content-length", str(content_length).encode()))
+        return ASGIRequest(scope, stream if stream is not None else BytesIO(body))
+
+    def test_body_size_not_exceeded_without_content_length(self):
+        body = b"x" * 5
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+            self.assertEqual(self.make_request(body).body, body)
+
+    def test_body_size_exceeded_without_content_length(self):
+        request = self.make_request(b"x" * 10)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+    def test_body_size_check_fires_before_read(self):
+        # The seekable size check rejects oversized bodies before reading
+        # them into memory (i.e. before calling self.read()).
+        class TrackingBytesIO(BytesIO):
+            calls = []
+
+            def read(self, *args, **kwargs):
+                self.calls.append((args, kwargs))
+                return super().read(*args, **kwargs)
+
+        stream = TrackingBytesIO(b"x" * 10)
+        request = self.make_request(b"x" * 10, stream=stream)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+        self.assertEqual(stream.calls, [])
+
+    def test_post_size_exceeded_without_content_length(self):
+        request = self.make_request(
+            b"a=" + b"x" * 10,
+            content_type=b"application/x-www-form-urlencoded",
+        )
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.POST
+
+    def test_no_limit(self):
+        body = b"x" * 100
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+            self.assertEqual(self.make_request(body).body, body)
+
+    async def test_read_body_no_limit(self):
+        chunks = [
+            {"type": "http.request", "body": b"x" * 100, "more_body": True},
+            {"type": "http.request", "body": b"x" * 100, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+            body_file = await handler.read_body(receive)
+            self.addCleanup(body_file.close)
+
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 200)
+
+    def test_non_multipart_body_size_enforced(self):
+        # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
+        request = self.make_request(b"x" * 100)
+        with (
+            self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10),
+            self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG),
+        ):
+            request.body
+
+    def test_multipart_file_upload_not_limited_by_data_upload_max(self):
+        # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
+        # upload whose total body exceeds the limit must still succeed.
+        boundary = "testboundary"
+        file_content = b"x" * 100
+        body = (
+            (
+                f"--{boundary}\r\n"
+                f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
+                f"Content-Type: application/octet-stream\r\n"
+                f"\r\n"
+            ).encode()
+            + file_content
+            + f"\r\n--{boundary}--\r\n".encode()
+        )
+        request = self.make_request(
+            body,
+            content_type=f"multipart/form-data; boundary={boundary}".encode(),
+            content_length=len(body),
+        )
+        with self.settings(
+            DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
+        ):
+            files = request.FILES
+            self.assertEqual(len(files), 1)
+            uploaded = files["file"]
+            self.addCleanup(uploaded.close)
+            self.assertEqual(uploaded.read(), file_content)
+
+    async def test_read_body_buffers_all_chunks(self):
+        # read_body() consumes all chunks regardless of
+        # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
+        # HttpRequest.body is accessed.
+        chunks = [
+            {"type": "http.request", "body": b"x" * 10, "more_body": True},
+            {"type": "http.request", "body": b"y" * 10, "more_body": True},
+            {"type": "http.request", "body": b"z" * 10, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+            body_file = await handler.read_body(receive)
+            self.addCleanup(body_file.close)
+
+        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
+
+    async def test_read_body_multipart_not_limited(self):
+        # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
+        # multipart size enforcement happens inside MultiPartParser, not here.
+        chunks = [
+            {"type": "http.request", "body": b"x" * 10, "more_body": True},
+            {"type": "http.request", "body": b"y" * 10, "more_body": True},
+            {"type": "http.request", "body": b"z" * 10, "more_body": False},
+        ]
+
+        async def receive():
+            return chunks.pop(0)
+
+        handler = ASGIHandler()
+        with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+            body_file = await handler.read_body(receive)
+            self.addCleanup(body_file.close)
+
+        self.assertEqual(len(chunks), 0)  # All chunks were consumed.
+        body_file.seek(0)
+        self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
```
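
On the seekable path, the patch measures the buffered body with a seek to the end before any read, which is exactly what `test_body_size_check_fires_before_read` above asserts. A standalone sketch of that measurement using `SpooledTemporaryFile`, the buffer type the ASGI handler spools request bodies into (`buffered_size` is a made-up helper; `tell()` is used for the size here because it is portable across Python versions):

```python
import os
from tempfile import SpooledTemporaryFile


def buffered_size(stream):
    # Seek to the end to measure how many bytes were actually buffered,
    # then rewind so the body can still be read from the start.
    stream.seek(0, os.SEEK_END)
    size = stream.tell()
    stream.seek(0)
    return size


body = SpooledTemporaryFile(max_size=4)  # spills to disk past 4 bytes
body.write(b"x" * 10)
print(buffered_size(body))  # 10, whatever Content-Length claimed
```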
