diff options
| author | Anssi Kääriäinen <akaariai@gmail.com> | 2012-04-29 04:22:05 +0300 |
|---|---|---|
| committer | Anssi Kääriäinen <akaariai@gmail.com> | 2012-09-02 19:17:15 +0300 |
| commit | 23268608512444de626e63dee1815fb5b5207a48 (patch) | |
| tree | 870e7f4ff110ef728bd2d5c32f197d7058332bc0 /tests/regressiontests/bulk_create | |
| parent | f0c469c7be1b8eb1326bb8ab263bdcdd1e8fa47e (diff) | |
[1.4.x] Fixed #17788 -- Added batch_size argument to qs.bulk_create()
The qs.bulk_create() method did not work with large batches together
with SQLite3. This commit adds a way to split the bulk into smaller
batches. The default batch size is unlimited except for SQLite3 where
the batch size is limited to 999 SQL parameters per batch.
Thanks to everybody who participated in the discussions at Trac.
Backpatch of 29132ebdef0e0b9c09e456b05f0e6a22f1106a4f from master (with
documentation changes removed).
Diffstat (limited to 'tests/regressiontests/bulk_create')
| -rw-r--r-- | tests/regressiontests/bulk_create/models.py | 6 | ||||
| -rw-r--r-- | tests/regressiontests/bulk_create/tests.py | 48 |
2 files changed, 49 insertions(+), 5 deletions(-)
diff --git a/tests/regressiontests/bulk_create/models.py b/tests/regressiontests/bulk_create/models.py
index a4c611d537..bc685bbbe4 100644
--- a/tests/regressiontests/bulk_create/models.py
+++ b/tests/regressiontests/bulk_create/models.py
@@ -18,4 +18,8 @@ class Pizzeria(Restaurant):
     pass
 
 class State(models.Model):
-    two_letter_code = models.CharField(max_length=2, primary_key=True)
\ No newline at end of file
+    two_letter_code = models.CharField(max_length=2, primary_key=True)
+
+class TwoFields(models.Model):
+    f1 = models.IntegerField(unique=True)
+    f2 = models.IntegerField(unique=True)
diff --git a/tests/regressiontests/bulk_create/tests.py b/tests/regressiontests/bulk_create/tests.py
index 0fa142b795..b4c3e7f17f 100644
--- a/tests/regressiontests/bulk_create/tests.py
+++ b/tests/regressiontests/bulk_create/tests.py
@@ -2,9 +2,11 @@ from __future__ import with_statement, absolute_import
 
 from operator import attrgetter
 
-from django.test import TestCase, skipUnlessDBFeature
+from django.db import connection
+from django.test import TestCase, skipIfDBFeature
+from django.test.utils import override_settings
 
-from .models import Country, Restaurant, Pizzeria, State
+from .models import Country, Restaurant, Pizzeria, State, TwoFields
 
 
 class BulkCreateTests(TestCase):
@@ -27,7 +29,6 @@ class BulkCreateTests(TestCase):
         self.assertEqual(created, [])
         self.assertEqual(Country.objects.count(), 4)
 
-    @skipUnlessDBFeature("has_bulk_insert")
     def test_efficiency(self):
         with self.assertNumQueries(1):
             Country.objects.bulk_create(self.data)
@@ -56,4 +57,43 @@ class BulkCreateTests(TestCase):
         ])
         self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [
             "CA", "IL", "ME", "NY",
-        ], attrgetter("two_letter_code"))
\ No newline at end of file
+        ], attrgetter("two_letter_code"))
+
+    def test_large_batch(self):
+        with override_settings(DEBUG=True):
+            connection.queries = []
+            TwoFields.objects.bulk_create([
+                TwoFields(f1=i, f2=i+1) for i in range(0, 1001)
+            ])
+        self.assertTrue(len(connection.queries) < 10)
+        self.assertEqual(TwoFields.objects.count(), 1001)
+        self.assertEqual(
+            TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(),
+            101)
+        self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101)
+
+    def test_large_batch_mixed(self):
+        """
+        Test inserting a large batch with objects having primary key set
+        mixed together with objects without PK set.
+        """
+        with override_settings(DEBUG=True):
+            connection.queries = []
+            TwoFields.objects.bulk_create([
+                TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i+1)
+                for i in range(100000, 101000)])
+        self.assertTrue(len(connection.queries) < 10)
+        self.assertEqual(TwoFields.objects.count(), 1000)
+        # We can't assume much about the ID's created, except that the above
+        # created IDs must exist.
+        id_range = range(100000, 101000, 2)
+        self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500)
+        self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500)
+
+    def test_explicit_batch_size(self):
+        objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)]
+        with self.assertNumQueries(2):
+            TwoFields.objects.bulk_create(objs, 50)
+        TwoFields.objects.all().delete()
+        with self.assertNumQueries(1):
+            TwoFields.objects.bulk_create(objs, len(objs))
