author     Natalia <124304+nessita@users.noreply.github.com>    2026-03-11 10:26:18 -0300
committer  Jacob Walls <jacobtylerwalls@gmail.com>              2026-04-07 07:43:51 -0400
commit     ed4dfda62718a0bb644b80ac8b1d3099861f2295 (patch)
tree       3d6a418120a94beaf943ba8cf1bde926dee60226
parent     f13c20f81b56108ac477213fa5ada2524b5e5c98 (diff)
[4.2.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE
against the declared `Content-Length` header before reading. On the ASGI
path, chunked requests carry no `Content-Length`, so the declared length
fell back to 0 and the check always passed regardless of the actual body
size. This work adds a new check on the actual number of bytes consumed.

Thanks to Superior for the report, and to Jake Howard and Jacob Walls
for reviews.

Backport of 953c238058c0ce387a1a41cb491bfc1875d73ad0 from main.
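For readers skimming the diff below, a minimal sketch of the pre-patch
failure mode; the names are illustrative stand-ins (ValueError for
Django's RequestDataTooBig, `meta` and `limit` for request.META and
settings.DATA_UPLOAD_MAX_MEMORY_SIZE), not the actual internals:

# Sketch of the pre-patch guard, simplified from HttpRequest.body.
def declared_length_check(meta, limit):
    declared = int(meta.get("CONTENT_LENGTH") or 0)
    if limit is not None and declared > limit:
        raise ValueError("Request body exceeded the limit.")

# Chunked ASGI requests carry no Content-Length header, so the declared
# length falls back to 0 and the guard passes for any actual body size.
declared_length_check({}, limit=2_621_440)  # default 2.5 MB limit; passes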
-rw-r--r--  django/http/request.py    45
-rw-r--r--  docs/releases/4.2.30.txt  11
-rw-r--r--  tests/asgi/tests.py       159
3 files changed, 204 insertions(+), 11 deletions(-)
diff --git a/django/http/request.py b/django/http/request.py
index 4d1d077ec1..0b86444c4b 100644
--- a/django/http/request.py
+++ b/django/http/request.py
@@ -1,5 +1,6 @@
import codecs
import copy
+import os
from io import BytesIO
from itertools import chain
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -328,25 +329,49 @@ class HttpRequest:
"You cannot access body after reading from request's data stream"
)
- # Limit the maximum request data size that will be handled in-memory.
- if (
- settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
- and int(self.META.get("CONTENT_LENGTH") or 0)
- > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
- ):
- raise RequestDataTooBig(
- "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
- )
+ # Limit the maximum request data size that will be handled
+ # in-memory. Reject early when Content-Length is present and
+ # already exceeds the limit, avoiding reading the body at all.
+ self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
+
+ # Content-Length can be absent or understated (e.g.
+ # `Transfer-Encoding: chunked` on ASGI), so for seekable
+ # streams (e.g. SpooledTemporaryFile on ASGI), check the actual
+ # buffered size before reading it all into memory.
+ if hasattr(self._stream, "seekable") and self._stream.seekable():
+ stream_size = self._stream.seek(0, os.SEEK_END)
+ self._check_data_too_big(stream_size)
+ self._stream.seek(0)
+ did_check = True
+ else:
+ did_check = False
try:
- self._body = self.read()
+ if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and not did_check:
+ # Read one byte past the limit to detect an oversize body
+ # without loading it all into memory first.
+ self._body = self.read(settings.DATA_UPLOAD_MAX_MEMORY_SIZE + 1)
+ else:
+ self._body = self.read()
except OSError as e:
raise UnreadablePostError(*e.args) from e
finally:
self._stream.close()
self._stream = BytesIO(self._body)
+ if not did_check:
+ stream_size = self._stream.seek(0, os.SEEK_END)
+ self._check_data_too_big(stream_size)
+ self._stream.seek(0)
return self._body
+ def _check_data_too_big(self, length):
+ if (
+ settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
+ and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
+ ):
+ msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
+ raise RequestDataTooBig(msg)
+
def _mark_post_parse_error(self):
self._post = QueryDict()
self._files = MultiValueDict()
diff --git a/docs/releases/4.2.30.txt b/docs/releases/4.2.30.txt
index c5058d9b84..8382907068 100644
--- a/docs/releases/4.2.30.txt
+++ b/docs/releases/4.2.30.txt
@@ -56,3 +56,14 @@ may trigger repeated memory copying, potentially degrading performance.
This issue has severity "moderate" according to the :ref:`Django security
policy <security-disclosure>`.
+
+CVE-2026-33034: Potential denial-of-service vulnerability in ASGI requests via memory upload limit bypass
+=========================================================================================================
+
+ASGI requests with a missing or understated ``Content-Length`` header could
+bypass the :setting:`DATA_UPLOAD_MAX_MEMORY_SIZE` limit when reading
+``HttpRequest.body``, potentially loading an unbounded request body into
+memory and causing service degradation.
+
+This issue has severity "low" according to the :ref:`Django security policy
+<security-disclosure>`.
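As a usage illustration, the fixed behavior can be observed directly with
the same private helper the tests below rely on
(AsyncRequestFactory()._base_scope); DATA_UPLOAD_MAX_MEMORY_SIZE=5 is an
artificially small limit chosen for the demo:

from io import BytesIO

from django.core.exceptions import RequestDataTooBig
from django.core.handlers.asgi import ASGIRequest
from django.test import AsyncRequestFactory, override_settings

# Assumes a configured settings module (e.g. via DJANGO_SETTINGS_MODULE).
scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
scope["headers"] = [(b"content-type", b"application/octet-stream")]
# No Content-Length header, as for a chunked request.
request = ASGIRequest(scope, BytesIO(b"x" * 10))

with override_settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
    try:
        request.body
    except RequestDataTooBig:
        # Raised before the oversized body is held in memory; Django's
        # handler converts this into a 400 (Bad Request) response.
        print("rejected")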
diff --git a/tests/asgi/tests.py b/tests/asgi/tests.py
index bf623c2083..08715026a9 100644
--- a/tests/asgi/tests.py
+++ b/tests/asgi/tests.py
@@ -1,12 +1,14 @@
import asyncio
import sys
import threading
+from io import BytesIO
from pathlib import Path
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
-from django.core.asgi import get_asgi_application
+from django.core.asgi import ASGIHandler, get_asgi_application
+from django.core.exceptions import RequestDataTooBig
from django.core.handlers.asgi import ASGIRequest
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
@@ -22,6 +24,7 @@ from django.utils.http import http_date
from .urls import sync_waiter, test_filename
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
+TOO_MUCH_DATA_MSG = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
@override_settings(ROOT_URLCONF="asgi.urls")
@@ -356,3 +359,157 @@ class ASGITest(SimpleTestCase):
self.assertEqual(len(sync_waiter.active_threads), 2)
sync_waiter.active_threads.clear()
+
+
+class DataUploadMaxMemorySizeASGITests(SimpleTestCase):
+ def make_request(
+ self,
+ body,
+ content_type=b"application/octet-stream",
+ content_length=None,
+ stream=None,
+ ):
+ scope = AsyncRequestFactory()._base_scope(method="POST", path="/")
+ scope["headers"] = [(b"content-type", content_type)]
+ if content_length is not None:
+ scope["headers"].append((b"content-length", str(content_length).encode()))
+ return ASGIRequest(scope, stream if stream is not None else BytesIO(body))
+
+ def test_body_size_not_exceeded_without_content_length(self):
+ body = b"x" * 5
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+ self.assertEqual(self.make_request(body).body, body)
+
+ def test_body_size_exceeded_without_content_length(self):
+ request = self.make_request(b"x" * 10)
+ with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+ request.body
+
+ def test_body_size_check_fires_before_read(self):
+ # The seekable size check rejects oversized bodies before reading
+ # them into memory (i.e. before calling self.read()).
+ class TrackingBytesIO(BytesIO):
+ calls = []
+
+ def read(self, *args, **kwargs):
+ self.calls.append((args, kwargs))
+ return super().read(*args, **kwargs)
+
+ stream = TrackingBytesIO(b"x" * 10)
+ request = self.make_request(b"x" * 10, stream=stream)
+ with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+ request.body
+
+ self.assertEqual(stream.calls, [])
+
+ def test_post_size_exceeded_without_content_length(self):
+ request = self.make_request(
+ b"a=" + b"x" * 10,
+ content_type=b"application/x-www-form-urlencoded",
+ )
+ with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=5):
+ request.POST
+
+ def test_no_limit(self):
+ body = b"x" * 100
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+ self.assertEqual(self.make_request(body).body, body)
+
+ async def test_read_body_no_limit(self):
+ chunks = [
+ {"type": "http.request", "body": b"x" * 100, "more_body": True},
+ {"type": "http.request", "body": b"x" * 100, "more_body": False},
+ ]
+
+ async def receive():
+ return chunks.pop(0)
+
+ handler = ASGIHandler()
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=None):
+ body_file = await handler.read_body(receive)
+ self.addCleanup(body_file.close)
+
+ body_file.seek(0)
+ self.assertEqual(body_file.read(), b"x" * 200)
+
+ def test_non_multipart_body_size_enforced(self):
+ # DATA_UPLOAD_MAX_MEMORY_SIZE is enforced on non-multipart bodies.
+ request = self.make_request(b"x" * 100)
+ with self.assertRaisesMessage(RequestDataTooBig, TOO_MUCH_DATA_MSG):
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=10):
+ request.body
+
+ def test_multipart_file_upload_not_limited_by_data_upload_max(self):
+ # DATA_UPLOAD_MAX_MEMORY_SIZE applies to non-file fields only; a file
+ # upload whose total body exceeds the limit must still succeed.
+ boundary = "testboundary"
+ file_content = b"x" * 100
+ body = (
+ (
+ f"--{boundary}\r\n"
+ f'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n'
+ f"Content-Type: application/octet-stream\r\n"
+ f"\r\n"
+ ).encode()
+ + file_content
+ + f"\r\n--{boundary}--\r\n".encode()
+ )
+ request = self.make_request(
+ body,
+ content_type=f"multipart/form-data; boundary={boundary}".encode(),
+ content_length=len(body),
+ )
+ with self.settings(
+ DATA_UPLOAD_MAX_MEMORY_SIZE=10, FILE_UPLOAD_MAX_MEMORY_SIZE=10
+ ):
+ files = request.FILES
+ self.assertEqual(len(files), 1)
+ uploaded = files["file"]
+ self.addCleanup(uploaded.close)
+ self.assertEqual(uploaded.read(), file_content)
+
+ async def test_read_body_buffers_all_chunks(self):
+ # read_body() consumes all chunks regardless of
+ # DATA_UPLOAD_MAX_MEMORY_SIZE; the limit is enforced later when
+ # HttpRequest.body is accessed.
+ chunks = [
+ {"type": "http.request", "body": b"x" * 10, "more_body": True},
+ {"type": "http.request", "body": b"y" * 10, "more_body": True},
+ {"type": "http.request", "body": b"z" * 10, "more_body": False},
+ ]
+
+ async def receive():
+ return chunks.pop(0)
+
+ handler = ASGIHandler()
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+ body_file = await handler.read_body(receive)
+ self.addCleanup(body_file.close)
+
+ self.assertEqual(len(chunks), 0) # All chunks were consumed.
+ body_file.seek(0)
+ self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)
+
+ async def test_read_body_multipart_not_limited(self):
+ # All chunks are consumed regardless of DATA_UPLOAD_MAX_MEMORY_SIZE;
+ # multipart size enforcement happens inside MultiPartParser, not here.
+ chunks = [
+ {"type": "http.request", "body": b"x" * 10, "more_body": True},
+ {"type": "http.request", "body": b"y" * 10, "more_body": True},
+ {"type": "http.request", "body": b"z" * 10, "more_body": False},
+ ]
+
+ async def receive():
+ return chunks.pop(0)
+
+ handler = ASGIHandler()
+ with self.settings(DATA_UPLOAD_MAX_MEMORY_SIZE=15):
+ body_file = await handler.read_body(receive)
+ self.addCleanup(body_file.close)
+
+ self.assertEqual(len(chunks), 0) # All chunks were consumed.
+ body_file.seek(0)
+ self.assertEqual(body_file.read(), b"x" * 10 + b"y" * 10 + b"z" * 10)