Fixed #33506 -- Made QuerySet.bulk_update() perform atomic writes against write database.
The lack of a _for_write = True assignment in bulk_update() prior to accessing self.db resulted in the db_for_read database being used to wrap the batched UPDATEs in a transaction. The batch queryset creation was also adjusted so that the batched UPDATEs are executed against the same database as the opened transaction under all circumstances.

Refs #23646, #33501.
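For context: QuerySet.db consults the router's db_for_write() only when the _for_write flag is set, and falls back to db_for_read() otherwise. The sketch below is a minimal primary/replica router (the "primary" and "replica" aliases are illustrative assumptions, not part of this commit) showing the kind of setup in which the missing flag routed bulk_update()'s transaction to the read database.

    # Minimal primary/replica router sketch. The "primary" and "replica"
    # database aliases are assumptions for illustration only.
    class PrimaryReplicaRouter:
        def db_for_read(self, model, **hints):
            # Reads are routed to the replica. Before this fix, bulk_update()
            # resolved self.db through this method, so its transaction and
            # batched UPDATEs targeted the read database.
            return "replica"

        def db_for_write(self, model, **hints):
            # Writes go to the primary. With _for_write = True set before
            # self.db is accessed, bulk_update() now resolves to this alias.
            return "primary"

        def allow_relation(self, obj1, obj2, **hints):
            return True

        def allow_migrate(self, db, app_label, model_name=None, **hints):
            return True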
Committed by: Mariusz Felisiak
Parent: d70b4bea18
Commit: d35ce682e3
django/db/models/query.py

@@ -725,6 +725,7 @@ class QuerySet:
             )
         # PK is used twice in the resulting update query, once in the filter
         # and once in the WHEN. Each field will also have one CAST.
+        self._for_write = True
         connection = connections[self.db]
         max_batch_size = connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
         batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
@@ -746,9 +747,10 @@ class QuerySet:
                 update_kwargs[field.attname] = case_statement
             updates.append(([obj.pk for obj in batch_objs], update_kwargs))
         rows_updated = 0
+        queryset = self.using(self.db)
         with transaction.atomic(using=self.db, savepoint=False):
             for pks, update_kwargs in updates:
-                rows_updated += self.filter(pk__in=pks).update(**update_kwargs)
+                rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
         return rows_updated

     bulk_update.alters_data = True
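A rough usage sketch, assuming the router above and a hypothetical Book model with a stock field: after this change the surrounding transaction and every batched UPDATE resolve to the same write alias, while plain reads on the queryset still follow db_for_read().

    # Rough usage sketch; Book, its stock field, and the "primary"/"replica"
    # aliases are hypothetical. bulk_update() now opens its transaction and
    # issues each batched UPDATE against the write database.
    books = list(Book.objects.all())  # read: routed to "replica"
    for book in books:
        book.stock += 10
    Book.objects.bulk_update(books, ["stock"], batch_size=100)
    # Roughly what bulk_update() does internally after the fix:
    #   with transaction.atomic(using="primary", savepoint=False):
    #       Book.objects.using("primary").filter(pk__in=[...]).update(...)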