summaryrefslogtreecommitdiff
path: root/django/http/request.py
diff options
context:
space:
mode:
authorNatalia <124304+nessita@users.noreply.github.com>2026-03-11 10:26:18 -0300
committerJacob Walls <jacobtylerwalls@gmail.com>2026-04-07 07:22:54 -0400
commit393dbc53e848876fdba92fbf02e10ee6a6eace6b (patch)
treef9f57d814549b88ccec34c4be0af429d855d7677 /django/http/request.py
parent0910af60468216c856dfbcac1177372c225deb76 (diff)
[6.0.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE against the declared `Content-Length` header before reading. On the ASGI path, chunked requests carry no `Content-Length`, so the check evaluated to 0 and always passed regardless of the actual body size. This work adds a new check on the actual number of bytes consumed. Thanks to Superior for the report, and to Jake Howard and Jacob Walls for reviews. Backport of 953c238058c0ce387a1a41cb491bfc1875d73ad0 from main.
Diffstat (limited to 'django/http/request.py')
-rw-r--r--django/http/request.py30
1 file changed, 21 insertions, 9 deletions
diff --git a/django/http/request.py b/django/http/request.py
index c8adde768d..6685525caa 100644
--- a/django/http/request.py
+++ b/django/http/request.py
@@ -1,6 +1,7 @@
import codecs
import copy
import operator
+import os
from io import BytesIO
from itertools import chain
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -385,15 +386,18 @@ class HttpRequest:
)
# Limit the maximum request data size that will be handled
- # in-memory.
- if (
- settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
- and int(self.META.get("CONTENT_LENGTH") or 0)
- > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
- ):
- raise RequestDataTooBig(
- "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
- )
+ # in-memory. Reject early when Content-Length is present and
+ # already exceeds the limit, avoiding reading the body at all.
+ self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
+
+ # Content-Length can be absent or understated (e.g.
+ # `Transfer-Encoding: chunked` on ASGI), so for seekable
+ # streams (e.g. SpooledTemporaryFile on ASGI), check the actual
+ # buffered size before reading it all into memory.
+ if self._stream.seekable():
+ stream_size = self._stream.seek(0, os.SEEK_END)
+ self._check_data_too_big(stream_size)
+ self._stream.seek(0)
try:
self._body = self.read()
@@ -404,6 +408,14 @@ class HttpRequest:
self._stream = BytesIO(self._body)
return self._body
+ def _check_data_too_big(self, length):
+ if (
+ settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
+ and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
+ ):
+ msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
+ raise RequestDataTooBig(msg)
+
def _mark_post_parse_error(self):
self._post = QueryDict()
self._files = MultiValueDict()