summary refs log tree commit diff
path: root/django/http/multipartparser.py
diff options
context:
space:
mode:
authorAndre Cruz <andre.cruz@co.sapo.pt>2015-01-07 18:41:29 +0000
committerTim Graham <timograham@gmail.com>2016-05-12 10:17:52 -0400
commit929684d6ee0efb5afad51dc584489d0437d2451b (patch)
tree44714f4502df964790f9db9a4751c843fa49a997 /django/http/multipartparser.py
parent4065f429f559533f11abbab40624a61027a52b61 (diff)
Fixed #21231 -- Enforced a max size for GET/POST values read into memory.
Thanks Tom Christie for review.
Diffstat (limited to 'django/http/multipartparser.py')
-rw-r--r--django/http/multipartparser.py37
1 file changed, 34 insertions, 3 deletions
diff --git a/django/http/multipartparser.py b/django/http/multipartparser.py
index b54eaca976..1babc72c98 100644
--- a/django/http/multipartparser.py
+++ b/django/http/multipartparser.py
@@ -12,7 +12,9 @@ import cgi
import sys
from django.conf import settings
-from django.core.exceptions import SuspiciousMultipartForm
+from django.core.exceptions import (
+ RequestDataTooBig, SuspiciousMultipartForm, TooManyFieldsSent,
+)
from django.core.files.uploadhandler import (
SkipFile, StopFutureHandlers, StopUpload,
)
@@ -145,6 +147,13 @@ class MultiPartParser(object):
old_field_name = None
counters = [0] * len(handlers)
+ # Number of bytes that have been read.
+ num_bytes_read = 0
+ # To count the number of keys in the request.
+ num_post_keys = 0
+ # To limit the amount of data read from the request.
+ read_size = None
+
try:
for item_type, meta_data, field_stream in Parser(stream, self._boundary):
if old_field_name:
@@ -166,15 +175,37 @@ class MultiPartParser(object):
field_name = force_text(field_name, encoding, errors='replace')
if item_type == FIELD:
+ # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
+ num_post_keys += 1
+ if (settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None and
+ settings.DATA_UPLOAD_MAX_NUMBER_FIELDS < num_post_keys):
+ raise TooManyFieldsSent(
+ 'The number of GET/POST parameters exceeded '
+ 'settings.DATA_UPLOAD_MAX_NUMBER_FIELDS.'
+ )
+
+ # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
+ if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
+ read_size = settings.DATA_UPLOAD_MAX_MEMORY_SIZE - num_bytes_read
+
# This is a post field, we can just set it in the post
if transfer_encoding == 'base64':
- raw_data = field_stream.read()
+ raw_data = field_stream.read(size=read_size)
+ num_bytes_read += len(raw_data)
try:
data = base64.b64decode(raw_data)
except _BASE64_DECODE_ERROR:
data = raw_data
else:
- data = field_stream.read()
+ data = field_stream.read(size=read_size)
+ num_bytes_read += len(data)
+
+ # Add two here to make the check consistent with the
+ # x-www-form-urlencoded check that includes '&='.
+ num_bytes_read += len(field_name) + 2
+ if (settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None and
+ num_bytes_read > settings.DATA_UPLOAD_MAX_MEMORY_SIZE):
+ raise RequestDataTooBig('Request body exceeded settings.DATA_UPLOAD_MAX_MEMORY_SIZE.')
self._post.appendlist(field_name,
force_text(data, encoding, errors='replace'))