
Fixed #16426 -- deletion of 1000+ objects with relations on SQLite

SQLite doesn't work with more than 1000 parameters in a single query.
The deletion code could generate queries that fetch related objects for
more than 1000 objects at once, exceeding that limit. Django now splits
the related object fetching into batches with at most 1000 parameters.
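
For context, the failure mode and the workaround can be reproduced outside Django with the standard sqlite3 module. The sketch below is illustrative only: the table, the helper name fetch_children and the hard-coded budget of 999 are assumptions, whereas Django obtains the budget from the backend's bulk_batch_size(), as the patch below shows.

    import sqlite3

    # Standalone sketch (not Django code) of the batching idea: a query like
    # "SELECT ... WHERE id IN (?, ?, ...)" can fail with "too many SQL
    # variables" once the placeholder count exceeds SQLite's compile-time
    # SQLITE_MAX_VARIABLE_NUMBER (999 by default in older builds), so the
    # id list is chunked before the query is built.

    MAX_PARAMS = 999  # assumed budget; Django asks the backend via bulk_batch_size()

    def fetch_children(conn, parent_ids):
        """Fetch child rows for parent_ids, binding at most MAX_PARAMS per query."""
        rows = []
        for start in range(0, len(parent_ids), MAX_PARAMS):
            chunk = parent_ids[start:start + MAX_PARAMS]
            placeholders = ", ".join("?" for _ in chunk)
            rows += conn.execute(
                "SELECT id FROM child WHERE parent_id IN (%s)" % placeholders,
                chunk,
            ).fetchall()
        return rows

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE child (id INTEGER PRIMARY KEY, parent_id INTEGER)")
    conn.executemany("INSERT INTO child (id, parent_id) VALUES (?, ?)",
                     [(i, i) for i in range(2500)])
    # 2500 parent ids -> three IN (...) queries instead of one oversized query.
    print(len(fetch_children(conn, list(range(2500)))))  # 2500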

The tests and patch include some work done by Trac alias NiGhTTraX in
ticket #21205.
Author: Anssi Kääriäinen
Date: 2014-08-11 12:47:37 +03:00
parent cdfdcf4b70
commit dfadbdac6a
2 changed files with 69 additions and 6 deletions

django/db/models/deletion.py

@@ -144,6 +144,18 @@ class Collector(object):
                 return False
         return True

+    def get_del_batches(self, objs, field):
+        """
+        Returns the objs in suitably sized batches for the used connection.
+        """
+        conn_batch_size = max(
+            connections[self.using].ops.bulk_batch_size([field.name], objs), 1)
+        if len(objs) > conn_batch_size:
+            return [objs[i:i + conn_batch_size]
+                    for i in range(0, len(objs), conn_batch_size)]
+        else:
+            return [objs]
+
     def collect(self, objs, source=None, nullable=False, collect_related=True,
                 source_attr=None, reverse_dependency=False):
         """
@@ -192,11 +204,13 @@ class Collector(object):
                 field = related.field
                 if field.rel.on_delete == DO_NOTHING:
                     continue
-                sub_objs = self.related_objects(related, new_objs)
-                if self.can_fast_delete(sub_objs, from_field=field):
-                    self.fast_deletes.append(sub_objs)
-                elif sub_objs:
-                    field.rel.on_delete(self, field, sub_objs, self.using)
+                batches = self.get_del_batches(new_objs, field)
+                for batch in batches:
+                    sub_objs = self.related_objects(related, batch)
+                    if self.can_fast_delete(sub_objs, from_field=field):
+                        self.fast_deletes.append(sub_objs)
+                    elif sub_objs:
+                        field.rel.on_delete(self, field, sub_objs, self.using)
             for field in model._meta.virtual_fields:
                 if hasattr(field, 'bulk_related_objects'):
                     # Its something like generic foreign key.
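
To make the splitting in get_del_batches concrete, here is a hypothetical standalone equivalent of its logic (the name split_into_batches and the example batch size of 999 are illustrative; the real size comes from the backend's bulk_batch_size(), and a backend whose bulk_batch_size() simply reports len(objs), as the base implementation does, still receives everything in a single batch, so behaviour on other databases should be unchanged):

    def split_into_batches(objs, conn_batch_size):
        # Mirrors get_del_batches: split objs so each batch fits the
        # backend-reported parameter budget (never smaller than 1).
        conn_batch_size = max(conn_batch_size, 1)
        if len(objs) > conn_batch_size:
            return [objs[i:i + conn_batch_size]
                    for i in range(0, len(objs), conn_batch_size)]
        return [objs]

    # e.g. 2500 objects with a batch size of 999 split into 999 + 999 + 502,
    # so each related-object query binds at most 999 parameters.
    print([len(b) for b in split_into_batches(list(range(2500)), 999)])  # [999, 999, 502]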