summary refs log tree commit diff
path: root/django/http/multipartparser.py
diff options
context:
space:
mode:
authorAnton Samarchyan <anton.samarchyan@savoirfairelinux.com>2017-01-24 16:23:56 -0500
committerTim Graham <timograham@gmail.com>2017-02-20 19:57:33 -0500
commit3eb679a86956d9eedf24492f0002de002f7180f5 (patch)
tree2ebd5947ef44bf4dd9f24715c3645d80f182c861 /django/http/multipartparser.py
parentafcf44c10108141e66a8aef0a3cbc151b1c16855 (diff)
Refs #27656 -- Updated django.forms/http docstring verbs according to PEP 257.
Diffstat (limited to 'django/http/multipartparser.py')
-rw-r--r-- django/http/multipartparser.py 23
1 files changed, 10 insertions, 13 deletions
diff --git a/django/http/multipartparser.py b/django/http/multipartparser.py
index 7b45404d9d..080f45f8ef 100644
--- a/django/http/multipartparser.py
+++ b/django/http/multipartparser.py
@@ -366,9 +366,8 @@ class LazyStream:
"""
Used when the exact number of bytes to read is unimportant.
- This procedure just returns whatever is chunk is conveniently returned
- from the iterator instead. Useful to avoid unnecessary bookkeeping if
- performance is an issue.
+ Return whatever chunk is conveniently returned from the iterator.
+ Useful to avoid unnecessary bookkeeping if performance is an issue.
"""
if self._leftover:
output = self._leftover
@@ -383,7 +382,7 @@ class LazyStream:
"""
Used to invalidate/disable this lazy stream.
- Replaces the producer with an empty list. Any leftover bytes that have
+ Replace the producer with an empty list. Any leftover bytes that have
already been read will still be reported upon read() and/or next().
"""
self._producer = []
@@ -393,7 +392,7 @@ class LazyStream:
def unget(self, bytes):
"""
- Places bytes back onto the front of the lazy stream.
+ Place bytes back onto the front of the lazy stream.
Future calls to read() will return those bytes first. The
stream position and thus tell() will be rewound.
@@ -406,7 +405,7 @@ class LazyStream:
def _update_unget_history(self, num_bytes):
"""
- Updates the unget history as a sanity check to see if we've pushed
+ Update the unget history as a sanity check to see if we've pushed
back the same number of bytes in one chunk. If we keep ungetting the
same number of bytes many times (here, 50), we're mostly likely in an
infinite loop of some sort. This is usually caused by a
@@ -429,8 +428,7 @@ class LazyStream:
class ChunkIter:
"""
An iterable that will yield chunks of data. Given a file-like object as the
- constructor, this object will yield chunks of read operations from that
- object.
+ constructor, yield chunks of read operations from that object.
"""
def __init__(self, flo, chunk_size=64 * 1024):
self.flo = flo
@@ -541,11 +539,10 @@ class BoundaryIter:
def _find_boundary(self, data, eof=False):
"""
- Finds a multipart boundary in data.
-
- Should no boundary exist in the data None is returned instead. Otherwise
- a tuple containing the indices of the following are returned:
+ Find a multipart boundary in data.
+ Should no boundary exist in the data, return None. Otherwise, return
+ a tuple containing the indices of the following:
* the end of current encapsulation
* the start of the next encapsulation
"""
@@ -578,7 +575,7 @@ def exhaust(stream_or_iterable):
def parse_boundary_stream(stream, max_header_size):
"""
- Parses one and exactly one stream that encapsulates a boundary.
+ Parse one and exactly one stream that encapsulates a boundary.
"""
# Stream at beginning of header, look for end of header
# and parse it if found. The header must fit within one