1
0
mirror of https://github.com/django/django.git synced 2025-08-21 01:09:13 +00:00

Fixed #36490 -- Avoided unnecessary transaction in bulk_create.

When dealing with a heterogeneous set of objects with regard to primary key
assignment that fits in a single batch, there's no need to wrap the single
INSERT statement in a transaction.
This commit is contained in:
Simon Charette 2025-07-02 01:47:47 -04:00 committed by Sarah Boyce
parent 5e06b97095
commit e1671278e8
2 changed files with 61 additions and 17 deletions

View File

@@ -5,6 +5,7 @@ The main QuerySet implementation. This provides the public API for the ORM.
 import copy
 import operator
 import warnings
+from contextlib import nullcontext
 from functools import reduce
 from itertools import chain, islice
@@ -802,7 +803,11 @@ class QuerySet(AltersData):
         fields = [f for f in opts.concrete_fields if not f.generated]
         objs = list(objs)
         objs_with_pk, objs_without_pk = self._prepare_for_bulk_create(objs)
-        with transaction.atomic(using=self.db, savepoint=False):
+        if objs_with_pk and objs_without_pk:
+            context = transaction.atomic(using=self.db, savepoint=False)
+        else:
+            context = nullcontext()
+        with context:
             self._handle_order_with_respect_to(objs)
             if objs_with_pk:
                 returned_columns = self._batched_insert(
@@ -1919,7 +1924,13 @@ class QuerySet(AltersData):
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
         inserted_rows = []
         bulk_return = connection.features.can_return_rows_from_bulk_insert
-        for item in [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]:
+        batches = [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]
+        if len(batches) > 1:
+            context = transaction.atomic(using=self.db, savepoint=False)
+        else:
+            context = nullcontext()
+        with context:
+            for item in batches:
                 if bulk_return and (
                     on_conflict is None or on_conflict == OnConflict.UPDATE
                 ):

View File

@@ -14,6 +14,7 @@ from django.db.models import FileField, Value
 from django.db.models.functions import Lower, Now
 from django.test import (
     TestCase,
+    TransactionTestCase,
     override_settings,
     skipIfDBFeature,
     skipUnlessDBFeature,
@@ -884,3 +885,35 @@ class BulkCreateTests(TestCase):
     def test_db_default_primary_key(self):
         (obj,) = DbDefaultPrimaryKey.objects.bulk_create([DbDefaultPrimaryKey()])
         self.assertIsInstance(obj.id, datetime)
@skipUnlessDBFeature("supports_transactions", "has_bulk_insert")
class BulkCreateTransactionTests(TransactionTestCase):
available_apps = ["bulk_create"]
def test_no_unnecessary_transaction(self):
with self.assertNumQueries(1):
Country.objects.bulk_create(
[Country(id=1, name="France", iso_two_letter="FR")]
)
with self.assertNumQueries(1):
Country.objects.bulk_create([Country(name="Canada", iso_two_letter="CA")])
def test_objs_with_and_without_pk(self):
with self.assertNumQueries(4):
Country.objects.bulk_create(
[
Country(id=1, name="France", iso_two_letter="FR"),
Country(name="Canada", iso_two_letter="CA"),
]
)
def test_multiple_batches(self):
with self.assertNumQueries(4):
Country.objects.bulk_create(
[
Country(name="France", iso_two_letter="FR"),
Country(name="Canada", iso_two_letter="CA"),
],
batch_size=1,
)