summaryrefslogtreecommitdiff
path: root/docs/howto/outputting-csv.txt
diff options
context:
space:
mode:
Diffstat (limited to 'docs/howto/outputting-csv.txt')
-rw-r--r-- | docs/howto/outputting-csv.txt | 12
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/docs/howto/outputting-csv.txt b/docs/howto/outputting-csv.txt
index c5ae7094d2..10de00503e 100644
--- a/docs/howto/outputting-csv.txt
+++ b/docs/howto/outputting-csv.txt
@@ -67,9 +67,12 @@ avoid a load balancer dropping a connection that might have otherwise timed out
while the server was generating the response.
In this example, we make full use of Python generators to efficiently handle
-the assembly and transmission of a large CSV file::
+the assembly and transmission of a large CSV file. Rows are batched together
+to reduce HTTP overhead and improve compression efficiency when used with
+:class:`~django.middleware.gzip.GZipMiddleware`::
import csv
+ from itertools import batched
from django.http import StreamingHttpResponse
@@ -92,8 +95,13 @@ the assembly and transmission of a large CSV file::
rows = (["Row {}".format(idx), str(idx)] for idx in range(65536))
pseudo_buffer = Echo()
writer = csv.writer(pseudo_buffer)
+
+ def stream_batched_rows():
+ for batch in batched(rows, 100):
+ yield "".join(writer.writerow(row) for row in batch)
+
return StreamingHttpResponse(
- (writer.writerow(row) for row in rows),
+ stream_batched_rows(),
content_type="text/csv",
headers={"Content-Disposition": 'attachment; filename="somefilename.csv"'},
)