diff --git a/django/db/models/query.py b/django/db/models/query.py
index 12eebe6555..ba75601398 100644
--- a/django/db/models/query.py
+++ b/django/db/models/query.py
@@ -37,11 +37,21 @@ class CollectedObjects(object):
 
     This is used for the database object deletion routines so that we can
     calculate the 'leaf' objects which should be deleted first.
+
+    previously_seen is an optional argument. It must be a CollectedObjects
+    instance itself; any previously_seen collected object will be blocked from
+    being added to this instance.
     """
 
-    def __init__(self):
+    def __init__(self, previously_seen=None):
         self.data = {}
         self.children = {}
+        if previously_seen:
+            self.blocked = previously_seen.blocked
+            for cls, seen in previously_seen.data.items():
+                self.blocked.setdefault(cls, SortedDict()).update(seen)
+        else:
+            self.blocked = {}
 
     def add(self, model, pk, obj, parent_model, nullable=False):
         """
@@ -58,6 +68,9 @@ class CollectedObjects(object):
         Returns True if the item already existed in the structure and
         False otherwise.
         """
+        if pk in self.blocked.get(model, {}):
+            return True
+
         d = self.data.setdefault(model, SortedDict())
         retval = pk in d
         d[pk] = obj
@@ -390,10 +403,11 @@ class QuerySet(object):
 
         # Delete objects in chunks to prevent the list of related objects from
         # becoming too long.
+        seen_objs = None
         while 1:
             # Collect all the objects to be deleted in this chunk, and all the
             # objects that are related to the objects that are to be deleted.
-            seen_objs = CollectedObjects()
+            seen_objs = CollectedObjects(seen_objs)
             for object in del_query[:CHUNK_SIZE]:
                 object._collect_sub_objects(seen_objs)
 
diff --git a/tests/regressiontests/delete_regress/__init__.py b/tests/regressiontests/delete_regress/__init__.py
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/tests/regressiontests/delete_regress/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/regressiontests/delete_regress/models.py b/tests/regressiontests/delete_regress/models.py
new file mode 100644
index 0000000000..6886d28dfc
--- /dev/null
+++ b/tests/regressiontests/delete_regress/models.py
@@ -0,0 +1,53 @@
+from django.conf import settings
+from django.db import models, backend, connection, transaction
+from django.db.models import sql, query
+from django.test import TestCase
+
+class Book(models.Model):
+    pagecount = models.IntegerField()
+
+# Can't run this test under SQLite, because you can't
+# get two connections to an in-memory database.
+if settings.DATABASE_ENGINE != 'sqlite3':
+    class DeleteLockingTest(TestCase):
+        def setUp(self):
+            # Create a second connection to the database
+            self.conn2 = backend.DatabaseWrapper(**settings.DATABASE_OPTIONS)
+
+            # Put both DB connections into managed transaction mode
+            transaction.enter_transaction_management()
+            transaction.managed(True)
+            # self.conn2._enter_transaction_management(True)
+
+        def tearDown(self):
+            # Close down the second connection.
+            transaction.leave_transaction_management()
+            self.conn2.close()
+
+        def test_concurrent_delete(self):
+            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
+
+            # Create some dummy data
+            b1 = Book(id=1, pagecount=100)
+            b2 = Book(id=2, pagecount=200)
+            b3 = Book(id=3, pagecount=300)
+            b1.save()
+            b2.save()
+            b3.save()
+
+            transaction.commit()
+
+            self.assertEquals(3, Book.objects.count())
+
+            # Delete something using connection 2.
+            cursor2 = self.conn2.cursor()
+            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
+            self.conn2._commit();
+
+            # Now perform a queryset delete that covers the object
+            # deleted in connection 2. This causes an infinite loop
+            # under MySQL InnoDB unless we keep track of already
+            # deleted objects.
+            Book.objects.filter(pagecount__lt=250).delete()
+            transaction.commit()
+            self.assertEquals(1, Book.objects.count())
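For reference, the chunked delete loop in the patch only terminates because each new CollectedObjects instance is seeded with everything collected in earlier chunks: an object that was already gathered once (for example, one deleted by a concurrent transaction) is blocked from being collected again. The sketch below is a minimal standalone model of that blocking behaviour, not Django code; it uses plain dicts in place of SortedDict and a toy Collector class whose names are purely illustrative.

# Simplified stand-in for CollectedObjects: each pass blocks every
# (model, pk) pair gathered by the previous pass, so an object that
# still matches the delete query cannot be re-collected forever.
class Collector(object):
    def __init__(self, previously_seen=None):
        self.data = {}      # {model_label: {pk: obj}} collected this pass
        self.blocked = {}   # {model_label: {pk: obj}} carried over from earlier passes
        if previously_seen:
            self.blocked = previously_seen.blocked
            for cls, seen in previously_seen.data.items():
                self.blocked.setdefault(cls, {}).update(seen)

    def add(self, model, pk, obj):
        # Returns True if the object was already known (collected or blocked).
        if pk in self.blocked.get(model, {}):
            return True
        d = self.data.setdefault(model, {})
        existed = pk in d
        d[pk] = obj
        return existed

# First pass collects pk=1; a second pass seeded with the first reports
# it as already seen instead of collecting it again.
first = Collector()
print(first.add('book', 1, 'Book 1'))    # False -- newly collected
second = Collector(first)
print(second.add('book', 1, 'Book 1'))   # True  -- blocked, not re-collected
print(second.add('book', 2, 'Book 2'))   # False -- genuinely new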