mirror of
https://github.com/django/django.git
synced 2025-07-05 10:19:20 +00:00
[soc2009/multidb] Merged multidb up to trunk r10914
git-svn-id: http://code.djangoproject.com/svn/django/branches/soc2009/multidb@10917 bcc190cf-cafb-0310-a4f2-bffc1f526a37
This commit is contained in:
parent b1f70d9e53
commit 2f2cfac142
@@ -35,7 +35,7 @@ class GeoWhereNode(WhereNode):
             return super(WhereNode, self).add(data, connector)
 
         obj, lookup_type, value = data
-        alias, col, field = obj.alias, obj.col, obj.field
+        col, field = obj.col, obj.field
 
         if not hasattr(field, "geom_type"):
             # Not a geographic field, so call `WhereNode.add`.
@@ -76,7 +76,7 @@ class GeoWhereNode(WhereNode):
             # the `get_geo_where_clause` to construct the appropriate
             # spatial SQL when `make_atom` is called.
             annotation = GeoAnnotation(field, value, where)
-            return super(WhereNode, self).add(((alias, col, field.db_type()), lookup_type, annotation, params), connector)
+            return super(WhereNode, self).add(((obj.alias, col, field.db_type()), lookup_type, annotation, params), connector)
 
     def make_atom(self, child, qn):
         obj, lookup_type, value_annot, params = child
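For context, a minimal sketch of the kind of query that reaches GeoWhereNode.add above: a spatial filter on a GeoManager queryset. The model (Location, from the relatedapp test models further down in this commit) and the exact lookup are illustrative assumptions, not part of the change itself:

    # Illustrative only -- assumes the relatedapp test app shown below is installed.
    from django.contrib.gis.geos import Polygon
    from django.contrib.gis.tests.relatedapp.models import Location

    # A geographic lookup: GeoWhereNode.add receives the (obj, lookup_type, value)
    # triple for 'point__within', wraps it in a GeoAnnotation, and defers to
    # WhereNode.add, as the hunk above shows.
    area = Polygon(((-97.0, 32.5), (-97.0, 33.0), (-96.5, 33.0), (-96.5, 32.5), (-97.0, 32.5)))
    qs = Location.objects.filter(point__within=area)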
@@ -32,3 +32,13 @@ class Parcel(models.Model):
     border2 = models.PolygonField(srid=2276)
     objects = models.GeoManager()
     def __unicode__(self): return self.name
+
+# These use the GeoManager but do not have any geographic fields.
+class Author(models.Model):
+    name = models.CharField(max_length=100)
+    objects = models.GeoManager()
+
+class Book(models.Model):
+    title = models.CharField(max_length=100)
+    author = models.ForeignKey(Author, related_name='books')
+    objects = models.GeoManager()
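The Author and Book models above have no geographic fields; they exist so the test added further down can exercise aggregation through GeoManager (the #11087 case). A minimal usage sketch, mirroring that test (Python 2 syntax to match the era; assumes the app is installed):

    from django.contrib.gis.db.models import Count

    # Annotate each Author with the number of related Book rows; this works even
    # though neither model defines a geometry column, because the GeoManager
    # queryset now accepts the Count aggregate.
    prolific = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
    for author in prolific:
        print author.name, author.num_books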
@@ -1,10 +1,10 @@
 import os, unittest
 from django.contrib.gis.geos import *
 from django.contrib.gis.db.backend import SpatialBackend
-from django.contrib.gis.db.models import F, Extent, Union
+from django.contrib.gis.db.models import Count, Extent, F, Union
 from django.contrib.gis.tests.utils import no_mysql, no_oracle, no_spatialite
 from django.conf import settings
-from models import City, Location, DirectoryEntry, Parcel
+from models import City, Location, DirectoryEntry, Parcel, Book, Author
 
 cities = (('Aurora', 'TX', -97.516111, 33.058333),
           ('Roswell', 'NM', -104.528056, 33.387222),
@@ -196,8 +196,8 @@ class RelatedGeoModelTest(unittest.TestCase):
         # ID values do not match their City ID values.
         loc1 = Location.objects.create(point='POINT (-95.363151 29.763374)')
         loc2 = Location.objects.create(point='POINT (-96.801611 32.782057)')
-        dallas = City.objects.create(name='Dallas', location=loc2)
-        houston = City.objects.create(name='Houston', location=loc1)
+        dallas = City.objects.create(name='Dallas', state='TX', location=loc2)
+        houston = City.objects.create(name='Houston', state='TX', location=loc1)
 
         # The expected ID values -- notice the last two location IDs
         # are out of order. We want to make sure that the related
@@ -231,6 +231,32 @@ class RelatedGeoModelTest(unittest.TestCase):
         q = pickle.loads(q_str)
         self.assertEqual(GeoQuery, q.__class__)
 
+    def test12_count(self):
+        "Testing `Count` aggregate use with the `GeoManager`. See #11087."
+        # Creating a new City, 'Fort Worth', that uses the same location
+        # as Dallas.
+        dallas = City.objects.get(name='Dallas')
+        ftworth = City.objects.create(name='Fort Worth', state='TX', location=dallas.location)
+
+        # Count annotation should be 2 for the Dallas location now.
+        loc = Location.objects.annotate(num_cities=Count('city')).get(id=dallas.location.id)
+        self.assertEqual(2, loc.num_cities)
+
+        # Creating some data for the Book/Author non-geo models that
+        # use GeoManager. See #11087.
+        tp = Author.objects.create(name='Trevor Paglen')
+        Book.objects.create(title='Torture Taxi', author=tp)
+        Book.objects.create(title='I Could Tell You But Then You Would Have to be Destroyed by Me', author=tp)
+        Book.objects.create(title='Blank Spots on the Map', author=tp)
+        wp = Author.objects.create(name='William Patry')
+        Book.objects.create(title='Patry on Copyright', author=wp)
+
+        # Should only be one author (Trevor Paglen) returned by this query, and
+        # the annotation should have 3 for the number of books.
+        qs = Author.objects.annotate(num_books=Count('books')).filter(num_books__gt=1)
+        self.assertEqual(1, len(qs))
+        self.assertEqual(3, qs[0].num_books)
+
     # TODO: Related tests for KML, GML, and distance lookups.
 
 def suite():
@@ -358,10 +358,11 @@ class QuerySet(object):
 
         # Delete objects in chunks to prevent the list of related objects from
         # becoming too long.
+        seen_objs = None
         while 1:
             # Collect all the objects to be deleted in this chunk, and all the
             # objects that are related to the objects that are to be deleted.
-            seen_objs = CollectedObjects()
+            seen_objs = CollectedObjects(seen_objs)
             for object in del_query[:CHUNK_SIZE]:
                 object._collect_sub_objects(seen_objs)
 
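Carrying seen_objs from one pass to the next is what lets the chunked loop above terminate when rows that were already collected (or deleted by a concurrent transaction) keep reappearing in del_query. A self-contained toy of that pattern follows; chunk_of_matching_rows and the row sets are hypothetical stand-ins for del_query[:CHUNK_SIZE] and the table contents:

    # Toy model of the chunked-delete loop: each pass re-queries the first
    # CHUNK_SIZE matching rows. Row 1 is 'stuck' (another transaction already
    # removed it, but our snapshot still matches it), so without carrying the
    # previously seen rows forward the loop would collect it forever.
    CHUNK_SIZE = 2
    stuck = {1}                  # matches the query but can never be deleted here
    live = {2, 3, 4}             # rows this transaction can actually delete

    def chunk_of_matching_rows():
        return sorted(stuck | live)[:CHUNK_SIZE]

    seen = frozenset()
    while True:
        collected = set(pk for pk in chunk_of_matching_rows() if pk not in seen)
        if not collected:        # nothing new collected: the loop terminates
            break
        live.difference_update(collected)   # "delete" what we can
        seen = seen | collected             # block these pks on the next pass
    assert live == set()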
@@ -32,11 +32,21 @@ class CollectedObjects(object):
 
     This is used for the database object deletion routines so that we can
     calculate the 'leaf' objects which should be deleted first.
+
+    previously_seen is an optional argument. It must be a CollectedObjects
+    instance itself; any previously_seen collected object will be blocked from
+    being added to this instance.
     """
 
-    def __init__(self):
+    def __init__(self, previously_seen=None):
         self.data = {}
         self.children = {}
+        if previously_seen:
+            self.blocked = previously_seen.blocked
+            for cls, seen in previously_seen.data.items():
+                self.blocked.setdefault(cls, SortedDict()).update(seen)
+        else:
+            self.blocked = {}
 
     def add(self, model, pk, obj, parent_model, nullable=False):
         """
@@ -53,6 +63,9 @@ class CollectedObjects(object):
         Returns True if the item already existed in the structure and
         False otherwise.
         """
+        if pk in self.blocked.get(model, {}):
+            return True
+
         d = self.data.setdefault(model, SortedDict())
         retval = pk in d
         d[pk] = obj
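The blocked mapping introduced above is what add() consults first: anything collected by a previous chunk reports True ("already existed") without being collected again. A stripped-down, self-contained illustration of that contract, using plain dicts in place of SortedDict and ignoring the model/parent_model bookkeeping:

    class Tracker(object):
        # Stripped-down stand-in for CollectedObjects, illustration only.
        def __init__(self, previously_seen=None):
            self.data = {}
            self.blocked = {}
            if previously_seen:
                self.blocked.update(previously_seen.blocked)
                self.blocked.update(previously_seen.data)

        def add(self, pk, obj):
            """Return True if pk was already collected or is blocked."""
            if pk in self.blocked:
                return True
            existed = pk in self.data
            self.data[pk] = obj
            return existed

    first = Tracker()
    first.add(1, 'book-1')                     # collected during the first chunk
    second = Tracker(first)                    # second chunk blocks what chunk 1 saw
    assert second.add(1, 'book-1')             # True: treated as already handled
    assert second.add(2, 'book-2') is False    # a genuinely new object is added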
tests/regressiontests/delete_regress/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+
tests/regressiontests/delete_regress/models.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+from django.conf import settings
+from django.db import models, backend, connection, transaction
+from django.db.models import sql, query
+from django.test import TransactionTestCase
+
+class Book(models.Model):
+    pagecount = models.IntegerField()
+
+# Can't run this test under SQLite, because you can't
+# get two connections to an in-memory database.
+if settings.DATABASE_ENGINE != 'sqlite3':
+    class DeleteLockingTest(TransactionTestCase):
+        def setUp(self):
+            # Create a second connection to the database
+            self.conn2 = backend.DatabaseWrapper({
+                'DATABASE_HOST': settings.DATABASE_HOST,
+                'DATABASE_NAME': settings.DATABASE_NAME,
+                'DATABASE_OPTIONS': settings.DATABASE_OPTIONS,
+                'DATABASE_PASSWORD': settings.DATABASE_PASSWORD,
+                'DATABASE_PORT': settings.DATABASE_PORT,
+                'DATABASE_USER': settings.DATABASE_USER,
+                'TIME_ZONE': settings.TIME_ZONE,
+            })
+
+            # Put both DB connections into managed transaction mode
+            transaction.enter_transaction_management()
+            transaction.managed(True)
+            self.conn2._enter_transaction_management(True)
+
+        def tearDown(self):
+            # Close down the second connection.
+            transaction.leave_transaction_management()
+            self.conn2.close()
+
+        def test_concurrent_delete(self):
+            "Deletes on concurrent transactions don't collide and lock the database. Regression for #9479"
+
+            # Create some dummy data
+            b1 = Book(id=1, pagecount=100)
+            b2 = Book(id=2, pagecount=200)
+            b3 = Book(id=3, pagecount=300)
+            b1.save()
+            b2.save()
+            b3.save()
+
+            transaction.commit()
+
+            self.assertEquals(3, Book.objects.count())
+
+            # Delete something using connection 2.
+            cursor2 = self.conn2.cursor()
+            cursor2.execute('DELETE from delete_regress_book WHERE id=1')
+            self.conn2._commit();
+
+            # Now perform a queryset delete that covers the object
+            # deleted in connection 2. This causes an infinite loop
+            # under MySQL InnoDB unless we keep track of already
+            # deleted objects.
+            Book.objects.filter(pagecount__lt=250).delete()
+            transaction.commit()
+            self.assertEquals(1, Book.objects.count())