| author | Natalia <124304+nessita@users.noreply.github.com> | 2026-03-11 10:26:18 -0300 |
|---|---|---|
| committer | Jacob Walls <jacobtylerwalls@gmail.com> | 2026-04-07 07:12:27 -0400 |
| commit | 953c238058c0ce387a1a41cb491bfc1875d73ad0 (patch) | |
| tree | e340c882d507601c37f8696df3076aba53fe35a6 /django | |
| parent | 7e9885f99cee771b51692fadc5592bdbf19641aa (diff) | |
Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE
against the declared `Content-Length` header before reading. On the ASGI
path, chunked requests carry no `Content-Length`, so the declared length
fell back to 0 and the check always passed, regardless of the actual
body size.
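A minimal standalone sketch of that pre-fix guard (paraphrased from the diff below, not Django's code verbatim; `meta` and `max_memory_size` stand in for `request.META` and `settings.DATA_UPLOAD_MAX_MEMORY_SIZE`):

```python
# Standalone sketch of the pre-fix guard in HttpRequest.body (see the diff
# below); `meta` and `max_memory_size` are stand-ins for request.META and
# settings.DATA_UPLOAD_MAX_MEMORY_SIZE.
def declared_length_within_limit(meta, max_memory_size=2_621_440):
    # With no Content-Length header (e.g. a chunked request on ASGI),
    # meta.get("CONTENT_LENGTH") is None, `None or 0` collapses to 0,
    # and 0 can never exceed the limit -- the guard always passes.
    declared = int(meta.get("CONTENT_LENGTH") or 0)
    return max_memory_size is None or declared <= max_memory_size


print(declared_length_within_limit({"CONTENT_LENGTH": "10485760"}))  # False
print(declared_length_within_limit({}))  # True, whatever the real body size
```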
This work adds a second check against the actual number of bytes
buffered in the request stream.
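The mechanism the new check relies on, sketched here with a plain `SpooledTemporaryFile` rather than a real ASGI request stream: the ASGI handler buffers the incoming body into a seekable spooled file, so its true size can be measured by seeking to the end before anything is read into memory.

```python
import os
from tempfile import SpooledTemporaryFile

# Measure the buffered body size the way the patch does, but on a plain
# SpooledTemporaryFile rather than an actual ASGI request stream.
stream = SpooledTemporaryFile(max_size=1024)
stream.write(b"x" * 5000)  # pretend this arrived as a chunked request body
stream.seek(0)

if stream.seekable():
    size = stream.seek(0, os.SEEK_END)  # bytes actually buffered: 5000
    stream.seek(0)  # rewind so the body can still be read afterwards
    print(size)
```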
Thanks to Superior for the report, and to Jake Howard and Jacob Walls
for reviews.
Diffstat (limited to 'django')
| -rw-r--r-- | django/http/request.py | 30 |
1 file changed, 21 insertions, 9 deletions
```diff
diff --git a/django/http/request.py b/django/http/request.py
index 573ae2b229..f871ea15e8 100644
--- a/django/http/request.py
+++ b/django/http/request.py
@@ -1,6 +1,7 @@
 import codecs
 import copy
 import operator
+import os
 from io import BytesIO
 from itertools import chain
 from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit
@@ -401,15 +402,18 @@ class HttpRequest:
                 )
 
             # Limit the maximum request data size that will be handled
-            # in-memory.
-            if (
-                settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
-                and int(self.META.get("CONTENT_LENGTH") or 0)
-                > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
-            ):
-                raise RequestDataTooBig(
-                    "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
-                )
+            # in-memory. Reject early when Content-Length is present and
+            # already exceeds the limit, avoiding reading the body at all.
+            self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0))
+
+            # Content-Length can be absent or understated (e.g.
+            # `Transfer-Encoding: chunked` on ASGI), so for seekable
+            # streams (e.g. SpooledTemporaryFile on ASGI), check the actual
+            # buffered size before reading it all into memory.
+            if self._stream.seekable():
+                stream_size = self._stream.seek(0, os.SEEK_END)
+                self._check_data_too_big(stream_size)
+                self._stream.seek(0)
 
             try:
                 self._body = self.read()
@@ -420,6 +424,14 @@ class HttpRequest:
             self._stream = BytesIO(self._body)
         return self._body
 
+    def _check_data_too_big(self, length):
+        if (
+            settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
+            and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
+        ):
+            msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
+            raise RequestDataTooBig(msg)
+
     def _mark_post_parse_error(self):
         self._post = QueryDict()
         self._files = MultiValueDict()
```
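For context, and not part of the patch: the limit being enforced is the ordinary `DATA_UPLOAD_MAX_MEMORY_SIZE` setting, expressed in bytes, with `None` disabling the check entirely; a project can tighten or relax it in its settings module.

```python
# settings.py -- illustrative value, not part of this patch.
# Maximum request body size (in bytes) that Django will buffer in memory
# before raising RequestDataTooBig; set to None to disable the check.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2_621_440  # 2.5 MB, the framework default
```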
