diff options
| author | Natalia <124304+nessita@users.noreply.github.com> | 2026-03-11 10:26:18 -0300 |
|---|---|---|
| committer | Jacob Walls <jacobtylerwalls@gmail.com> | 2026-04-07 07:34:17 -0400 |
| commit | 49e1e2b548999a35a025f9682598946bda9e9921 (patch) | |
| tree | eba2c042d949a55df60f8d56602fbd321ac5cda1 /django/http | |
| parent | 0b467893bdde69a2d23034338e76021a1e4f4322 (diff) | |
[5.2.x] Fixed CVE-2026-33034 -- Enforced DATA_UPLOAD_MAX_MEMORY_SIZE on body size in ASGI requests.
The `body` property in `HttpRequest` checks DATA_UPLOAD_MAX_MEMORY_SIZE
against the declared `Content-Length` header before reading. On the ASGI
path, chunked requests carry no `Content-Length`, so the declared size
evaluated to 0 and the check always passed regardless of the actual body size.
This work adds a new check on the actual number of bytes consumed.
Thanks to Superior for the report, and to Jake Howard and Jacob Walls
for reviews.
Backport of 953c238058c0ce387a1a41cb491bfc1875d73ad0 from main.
Diffstat (limited to 'django/http')
| -rw-r--r-- | django/http/request.py | 45 |
1 file changed, 35 insertions, 10 deletions
diff --git a/django/http/request.py b/django/http/request.py index daaf3748cc..7771f507d3 100644 --- a/django/http/request.py +++ b/django/http/request.py @@ -1,6 +1,7 @@ import codecs import copy import operator +import os from io import BytesIO from itertools import chain from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlsplit @@ -380,25 +381,49 @@ class HttpRequest: "You cannot access body after reading from request's data stream" ) - # Limit the maximum request data size that will be handled in-memory. - if ( - settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None - and int(self.META.get("CONTENT_LENGTH") or 0) - > settings.DATA_UPLOAD_MAX_MEMORY_SIZE - ): - raise RequestDataTooBig( - "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." - ) + # Limit the maximum request data size that will be handled + # in-memory. Reject early when Content-Length is present and + # already exceeds the limit, avoiding reading the body at all. + self._check_data_too_big(int(self.META.get("CONTENT_LENGTH") or 0)) + + # Content-Length can be absent or understated (e.g. + # `Transfer-Encoding: chunked` on ASGI), so for seekable + # streams (e.g. SpooledTemporaryFile on ASGI), check the actual + # buffered size before reading it all into memory. + if hasattr(self._stream, "seekable") and self._stream.seekable(): + stream_size = self._stream.seek(0, os.SEEK_END) + self._check_data_too_big(stream_size) + self._stream.seek(0) + did_check = True + else: + did_check = False try: - self._body = self.read() + if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and not did_check: + # Read one byte past the limit to detect an oversize body + # without loading it all into memory first. 
+ self._body = self.read(settings.DATA_UPLOAD_MAX_MEMORY_SIZE + 1) + else: + self._body = self.read() except OSError as e: raise UnreadablePostError(*e.args) from e finally: self._stream.close() self._stream = BytesIO(self._body) + if not did_check: + stream_size = self._stream.seek(0, os.SEEK_END) + self._check_data_too_big(stream_size) + self._stream.seek(0) return self._body + def _check_data_too_big(self, length): + if ( + settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None + and length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE + ): + msg = "Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE." + raise RequestDataTooBig(msg) + def _mark_post_parse_error(self): self._post = QueryDict() self._files = MultiValueDict() |
