summaryrefslogtreecommitdiff
path: root/tests/decorators
diff options
context:
space:
mode:
author	farhan <farhanalirazaazeemi@gmail.com>	2025-12-13 23:33:33 +0500
committer	Jacob Walls <jacobtylerwalls@gmail.com>	2026-03-09 08:41:00 -0400
commit	12bb16da8fbadac34e2de318cc79d7d765f35a96 (patch)
tree	e1ad54809d460e2139f1436492432072a8c45427 /tests/decorators
parent	787166fe27b0e7c7f97505da5766cfa72e76ae25 (diff)
Fixed #36293 -- Avoided buffering streaming responses in GZipMiddleware.
This avoids latency and/or blocking. The example of streaming a CSV file was rewritten to employ batching for greater efficiency in all layers (db, HTTP, etc.). The improved performance from batching should outweigh the drag introduced by an additional byte for each flush. Co-authored-by: huoyinghui <huoyinghui@users.noreply.github.com>
Diffstat (limited to 'tests/decorators')
-rw-r--r--	tests/decorators/test_gzip.py	32
1 file changed, 31 insertions, 1 deletion
diff --git a/tests/decorators/test_gzip.py b/tests/decorators/test_gzip.py
index 2d64c171f7..8cd0869b53 100644
--- a/tests/decorators/test_gzip.py
+++ b/tests/decorators/test_gzip.py
@@ -1,6 +1,6 @@
from inspect import iscoroutinefunction
-from django.http import HttpRequest, HttpResponse
+from django.http import HttpRequest, HttpResponse, StreamingHttpResponse
from django.test import SimpleTestCase
from django.views.decorators.gzip import gzip_page
@@ -44,3 +44,33 @@ class GzipPageTests(SimpleTestCase):
response = await async_view(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.get("Content-Encoding"), "gzip")
+
+ def test_streaming_response_yields_chunks_incrementally(self):
+ @gzip_page
+ def stream_view(request):
+ return StreamingHttpResponse(self.content.encode() for _ in range(5))
+
+ request = HttpRequest()
+ request.META["HTTP_ACCEPT_ENCODING"] = "gzip"
+ response = stream_view(request)
+ compressed_chunks = list(response)
+ # Each input chunk should produce compressed output, not buffer
+ # everything into a single chunk.
+ self.assertGreater(len(compressed_chunks), 2)
+
+ async def test_async_streaming_response_yields_chunks_incrementally(self):
+ @gzip_page
+ async def stream_view(request):
+ async def content():
+ for _ in range(5):
+ yield self.content.encode()
+
+ return StreamingHttpResponse(content())
+
+ request = HttpRequest()
+ request.META["HTTP_ACCEPT_ENCODING"] = "gzip"
+ response = await stream_view(request)
+ compressed_chunks = [chunk async for chunk in response]
+ # Each input chunk should produce compressed output, not buffer
+ # everything into a single chunk.
+ self.assertGreater(len(compressed_chunks), 2)