
[soc2009/multidb] Made instances sticky to the database that created them.

This involves:
 * Adding a _state attribute to instances to track instance state
 * Making db a state attribute on instances, and a public attribute on querysets.

Patch from Russell Keith-Magee.

git-svn-id: http://code.djangoproject.com/svn/django/branches/soc2009/multidb@11769 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Alex Gaynor 2009-11-23 16:43:54 +00:00
parent cec677a782
commit da909ae3a0
12 changed files with 280 additions and 135 deletions
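
In practice this makes an instance remember the database it came from and reuse
that alias for later writes. A minimal sketch of the behaviour, borrowing the
Book model and the 'other' alias from the regression tests below:

    book = Book.objects.using('other').create(title="Dive into Python",
                                              published=datetime.date(2009, 5, 4))
    assert book._state.db == 'other'   # the instance records its source database
    book.title = "Dive into Python, 2nd ed."
    book.save()                        # resolves to 'other' via _state.db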

TODO
View File

@@ -11,14 +11,10 @@ Required for v1.2
* Should we take the opportunity to modify DB backends to use fully qualified paths?
* Should we clean up DATABASES['DATABASE_NAME'] to DATABASES['NAME'] etc?
* Meta.using? Is it still required/desirable?
* Fix the regressiontests/multiple_database test failures
* Give instances knowledge of the database from which they were loaded.
* Cascade instance using to m2m queries
* Cleanup of new API entry points
* validate() on a field
* name/purpose clash with Honza?
* any overlap with existing methods?
* Accessing _using in BaseModelFormSet.
Optional for v1.2
~~~~~~~~~~~~~~~~~

View File

@@ -230,6 +230,13 @@ class ModelBase(type):
signals.class_prepared.send(sender=cls)
class ModelState(object):
"""
A class for storing instance state
"""
def __init__(self, db=None):
self.db = db
class Model(object):
__metaclass__ = ModelBase
_deferred = False
@@ -237,6 +244,9 @@ class Model(object):
def __init__(self, *args, **kwargs):
signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
# There is a rather weird disparity here; if kwargs, it's set, then args
# overrides it. It should be one or the other; don't duplicate the work
# The reason for the kwargs check is that standard iterator passes in by
@@ -428,7 +438,7 @@ class Model(object):
need for overrides of save() to pass around internal-only parameters
('raw', 'cls', and 'origin').
"""
using = using or self._meta.using or DEFAULT_DB_ALIAS
using = using or self._state.db or self._meta.using or DEFAULT_DB_ALIAS
connection = connections[using]
assert not (force_insert and force_update)
if cls is None:
@@ -514,6 +524,10 @@ class Model(object):
setattr(self, meta.pk.attname, result)
transaction.commit_unless_managed(using=using)
# Store the database on which the object was saved
self._state.db = using
# Signal that the save is complete
if origin and not meta.auto_created:
signals.post_save.send(sender=origin, instance=self,
created=(not record_exists), raw=raw)
@@ -577,7 +591,7 @@ class Model(object):
parent_obj._collect_sub_objects(seen_objs)
def delete(self, using=None):
using = using or self._meta.using or DEFAULT_DB_ALIAS
using = using or self._state.db or self._meta.using or DEFAULT_DB_ALIAS
connection = connections[using]
assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
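
Taken together, the base.py changes give every instance a ModelState and consult
it on writes: an explicit using argument still wins, then the instance's
_state.db, then Meta.using, then the project default. A rough sketch of the
resulting lifecycle (Book and the 'other' alias are illustrative, taken from the
tests further down):

    b = Book(title="Pro Django", published=datetime.date(2008, 12, 16))
    assert b._state.db is None      # fresh instance: no origin database yet
    b.save(using='other')           # the explicit alias takes precedence
    assert b._state.db == 'other'   # recorded once the save completes
    b.delete()                      # resolves to 'other' through b._state.db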

View File

@@ -195,7 +195,7 @@ class SingleRelatedObjectDescriptor(object):
return getattr(instance, self.cache_name)
except AttributeError:
params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
rel_obj = self.related.model._base_manager.get(**params)
rel_obj = self.related.model._base_manager.using(instance._state.db).get(**params)
setattr(instance, self.cache_name, rel_obj)
return rel_obj
@@ -259,9 +259,9 @@ class ReverseSingleRelatedObjectDescriptor(object):
# related fields, respect that.
rel_mgr = self.field.rel.to._default_manager
if getattr(rel_mgr, 'use_for_related_fields', False):
rel_obj = rel_mgr.get(**params)
rel_obj = rel_mgr.using(instance._state.db).get(**params)
else:
rel_obj = QuerySet(self.field.rel.to).get(**params)
rel_obj = QuerySet(self.field.rel.to).using(instance._state.db).get(**params)
setattr(instance, cache_name, rel_obj)
return rel_obj
@@ -359,14 +359,14 @@ class ForeignRelatedObjectsDescriptor(object):
class RelatedManager(superclass):
def get_query_set(self):
return superclass.get_query_set(self).filter(**(self.core_filters))
return superclass.get_query_set(self).using(instance._state.db).filter(**(self.core_filters))
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError, "'%s' instance expected" % self.model._meta.object_name
setattr(obj, rel_field.name, instance)
obj.save()
obj.save(using=instance._state.db)
add.alters_data = True
def create(self, **kwargs):
@@ -378,7 +378,7 @@ class ForeignRelatedObjectsDescriptor(object):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs.update({rel_field.name: instance})
return super(RelatedManager, self).get_or_create(**kwargs)
return super(RelatedManager, self).using(instance._state.db).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
@@ -389,7 +389,7 @@ class ForeignRelatedObjectsDescriptor(object):
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
obj.save(using=instance._state.db)
else:
raise rel_field.rel.to.DoesNotExist, "%r is not related to %r." % (obj, instance)
remove.alters_data = True
@@ -397,7 +397,7 @@ class ForeignRelatedObjectsDescriptor(object):
def clear(self):
for obj in self.all():
setattr(obj, rel_field.name, None)
obj.save()
obj.save(using=instance._state.db)
clear.alters_data = True
manager = RelatedManager()
@@ -463,14 +463,14 @@ def create_many_related_manager(superclass, rel=False):
if not rel.through._meta.auto_created:
opts = through._meta
raise AttributeError, "Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name)
new_obj = super(ManyRelatedManager, self).create(**kwargs)
new_obj = super(ManyRelatedManager, self).using(self.instance._state.db).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
obj, created = \
super(ManyRelatedManager, self).get_or_create(**kwargs)
super(ManyRelatedManager, self).using(self.instance._state.db).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
@@ -495,7 +495,7 @@ def create_many_related_manager(superclass, rel=False):
raise TypeError, "'%s' instance expected" % self.model._meta.object_name
else:
new_ids.add(obj)
vals = self.through._default_manager.values_list(target_field_name, flat=True)
vals = self.through._default_manager.using(self.instance._state.db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: new_ids,
@@ -504,7 +504,7 @@ def create_many_related_manager(superclass, rel=False):
# Add the ones that aren't there already
for obj_id in (new_ids - vals):
self.through._default_manager.create(**{
self.through._default_manager.using(self.instance._state.db).create(**{
'%s_id' % source_field_name: self._pk_val,
'%s_id' % target_field_name: obj_id,
})
@@ -524,14 +524,14 @@ def create_many_related_manager(superclass, rel=False):
else:
old_ids.add(obj)
# Remove the specified objects from the join table
self.through._default_manager.filter(**{
self.through._default_manager.using(self.instance._state.db).filter(**{
source_field_name: self._pk_val,
'%s__in' % target_field_name: old_ids
}).delete()
def _clear_items(self, source_field_name):
# source_col_name: the PK colname in join_table for the source object
self.through._default_manager.filter(**{
self.through._default_manager.using(self.instance._state.db).filter(**{
source_field_name: self._pk_val
}).delete()
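
With the descriptor and manager changes above, relation manipulation now follows
the database the instance was loaded from instead of silently falling back to
the default. A sketch against the test models (Author, Book and the 'other'
alias come from the multiple_database tests below, not from this module):

    mark = Author.objects.using('other').get(name="Mark Pilgrim")
    dive = Book.objects.using('other').get(title="Dive into Python")
    dive.authors.add(mark)        # m2m join rows are written on 'other'
    dive.authors.remove(mark)     # ...and removed from 'other'
    dive.favourite_of.create(name="Jane Brown")  # reverse FK create() saves on 'other'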

View File

@@ -38,7 +38,7 @@ class QuerySet(object):
self._result_cache = None
self._iter = None
self._sticky_filter = False
self._using = using
self.db = using
########################
# PYTHON MAGIC METHODS #
@@ -236,8 +236,8 @@ class QuerySet(object):
else:
init_list.append(field.attname)
model_cls = deferred_class_factory(self.model, skip)
compiler = self.query.get_compiler(using=self._using)
compiler = self.query.get_compiler(using=self.db)
for row in compiler.results_iter():
if fill_cache:
obj, _ = get_cached_row(self.model, row,
@@ -260,6 +260,9 @@ class QuerySet(object):
for i, aggregate in enumerate(aggregate_select):
setattr(obj, aggregate, row[i+aggregate_start])
# Store the source database of the object
obj._state.db = self.db
yield obj
def aggregate(self, *args, **kwargs):
@@ -279,7 +282,7 @@ class QuerySet(object):
query.add_aggregate(aggregate_expr, self.model, alias,
is_summary=True)
return query.get_aggregation(using=self._using)
return query.get_aggregation(using=self.db)
def count(self):
"""
@@ -292,7 +295,7 @@ class QuerySet(object):
if self._result_cache is not None and not self._iter:
return len(self._result_cache)
return self.query.get_count(using=self._using)
return self.query.get_count(using=self.db)
def get(self, *args, **kwargs):
"""
@@ -315,7 +318,7 @@ class QuerySet(object):
and returning the created object.
"""
obj = self.model(**kwargs)
obj.save(force_insert=True, using=self._using)
obj.save(force_insert=True, using=self.db)
return obj
def get_or_create(self, **kwargs):
@@ -334,12 +337,12 @@ class QuerySet(object):
params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
params.update(defaults)
obj = self.model(**params)
sid = transaction.savepoint(using=self._using)
obj.save(force_insert=True, using=self._using)
transaction.savepoint_commit(sid, using=self._using)
sid = transaction.savepoint(using=self.db)
obj.save(force_insert=True, using=self.db)
transaction.savepoint_commit(sid, using=self.db)
return obj, True
except IntegrityError, e:
transaction.savepoint_rollback(sid, using=self._using)
transaction.savepoint_rollback(sid, using=self.db)
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
@@ -399,7 +402,7 @@ class QuerySet(object):
if not seen_objs:
break
delete_objects(seen_objs, del_query._using)
delete_objects(seen_objs, del_query.db)
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
@@ -414,20 +417,20 @@ class QuerySet(object):
"Cannot update a query once a slice has been taken."
query = self.query.clone(sql.UpdateQuery)
query.add_update_values(kwargs)
if not transaction.is_managed(using=self._using):
transaction.enter_transaction_management(using=self._using)
if not transaction.is_managed(using=self.db):
transaction.enter_transaction_management(using=self.db)
forced_managed = True
else:
forced_managed = False
try:
rows = query.get_compiler(self._using).execute_sql(None)
rows = query.get_compiler(self.db).execute_sql(None)
if forced_managed:
transaction.commit(using=self._using)
transaction.commit(using=self.db)
else:
transaction.commit_unless_managed(using=self._using)
transaction.commit_unless_managed(using=self.db)
finally:
if forced_managed:
transaction.leave_transaction_management(using=self._using)
transaction.leave_transaction_management(using=self.db)
self._result_cache = None
return rows
update.alters_data = True
@@ -444,12 +447,12 @@ class QuerySet(object):
query = self.query.clone(sql.UpdateQuery)
query.add_update_fields(values)
self._result_cache = None
return query.get_compiler(self._using).execute_sql(None)
return query.get_compiler(self.db).execute_sql(None)
_update.alters_data = True
def exists(self):
if self._result_cache is None:
return self.query.has_results(using=self._using)
return self.query.has_results(using=self.db)
return bool(self._result_cache)
##################################################
@@ -661,7 +664,7 @@ class QuerySet(object):
Selects which database this QuerySet should execute its query against.
"""
clone = self._clone()
clone._using = alias
clone.db = alias
return clone
###################################
@@ -692,7 +695,7 @@ class QuerySet(object):
if self._sticky_filter:
query.filter_is_sticky = True
c = klass(model=self.model, query=query)
c._using = self._using
c.db = self.db
c.__dict__.update(kwargs)
if setup and hasattr(c, '_setup_query'):
c._setup_query()
@@ -747,7 +750,7 @@ class QuerySet(object):
Returns the internal query's SQL and parameters (as a tuple).
"""
obj = self.values("pk")
if connection == connections[obj._using]:
if connection == connections[obj.db]:
return obj.query.get_compiler(connection=connection).as_nested_sql()
raise ValueError("Can't do subqueries with queries on different DBs.")
@@ -779,7 +782,7 @@ class ValuesQuerySet(QuerySet):
names = extra_names + field_names + aggregate_names
for row in self.query.get_compiler(self._using).results_iter():
for row in self.query.get_compiler(self.db).results_iter():
yield dict(zip(names, row))
def _setup_query(self):
@@ -876,7 +879,7 @@ class ValuesQuerySet(QuerySet):
% self.__class__.__name__)
obj = self._clone()
if connection == connections[obj._using]:
if connection == connections[obj.db]:
return obj.query.get_compiler(connection=connection).as_nested_sql()
raise ValueError("Can't do subqueries with queries on different DBs.")
@@ -894,10 +897,10 @@ class ValuesListQuerySet(ValuesQuerySet):
class ValuesListQuerySet(ValuesQuerySet):
def iterator(self):
if self.flat and len(self._fields) == 1:
for row in self.query.get_compiler(self._using).results_iter():
for row in self.query.get_compiler(self.db).results_iter():
yield row[0]
elif not self.query.extra_select and not self.query.aggregate_select:
for row in self.query.get_compiler(self._using).results_iter():
for row in self.query.get_compiler(self.db).results_iter():
yield tuple(row)
else:
# When extra(select=...) or an annotation is involved, the extra
@@ -916,7 +919,7 @@ class ValuesListQuerySet(ValuesQuerySet):
else:
fields = names
for row in self.query.get_compiler(self._using).results_iter():
for row in self.query.get_compiler(self.db).results_iter():
data = dict(zip(names, row))
yield tuple([data[f] for f in fields])
@@ -928,7 +931,7 @@ class DateQuerySet(QuerySet):
class DateQuerySet(QuerySet):
def iterator(self):
return self.query.get_compiler(self._using).results_iter()
return self.query.get_compiler(self.db).results_iter()
def _setup_query(self):
"""

View File

@@ -472,7 +472,7 @@ class BaseModelFormSet(BaseFormSet):
pk = self.data[pk_key]
pk_field = self.model._meta.pk
pk = pk_field.get_db_prep_lookup('exact', pk,
connection=connections[self.get_queryset()._using])
connection=connections[self.get_queryset().db])
if isinstance(pk, list):
pk = pk[0]
kwargs['instance'] = self._existing_object(pk)
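
The formset change means primary-key values are prepped against the connection
of the queryset the formset wraps, rather than the default. Illustratively
(BookFormSet via modelformset_factory is a hypothetical usage, not part of this
patch):

    BookFormSet = modelformset_factory(Book)
    formset = BookFormSet(queryset=Book.objects.using('other'))
    # pk lookups inside the formset now use connections['other']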

View File

@@ -157,7 +157,7 @@ False
# The underlying query only makes one join when a related table is referenced twice.
>>> queryset = Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
>>> sql = queryset.query.get_compiler(queryset._using).as_sql()[0]
>>> sql = queryset.query.get_compiler(queryset.db).as_sql()[0]
>>> sql.count('INNER JOIN')
1

View File

@@ -250,10 +250,10 @@ FieldError: Cannot resolve keyword 'foo' into field. Choices are: authors, conta
>>> out = pickle.dumps(qs)
# Then check that the round trip works.
>>> query = qs.query.get_compiler(qs._using).as_sql()[0]
>>> query = qs.query.get_compiler(qs.db).as_sql()[0]
>>> select_fields = qs.query.select_fields
>>> query2 = pickle.loads(pickle.dumps(qs))
>>> query2.query.get_compiler(query2._using).as_sql()[0] == query
>>> query2.query.get_compiler(query2.db).as_sql()[0] == query
True
>>> query2.query.select_fields = select_fields

View File

@@ -315,7 +315,7 @@ DoesNotExist: ArticleWithAuthor matching query does not exist.
# Regression test for #9390. This necessarily pokes at the SQL string for the
# query, since the duplicate problems are only apparent at that late stage.
>>> qs = ArticleWithAuthor.objects.order_by('pub_date', 'pk')
>>> sql = qs.query.get_compiler(qs._using).as_sql()[0]
>>> sql = qs.query.get_compiler(qs.db).as_sql()[0]
>>> fragment = sql[sql.find('ORDER BY'):]
>>> pos = fragment.find('pub_date')
>>> fragment.find('pub_date', pos + 1) == -1

View File

@@ -3,8 +3,8 @@
"pk": 2,
"model": "multiple_database.book",
"fields": {
"title": "Dive into Python",
"published": "2009-5-4"
"title": "Pro Django",
"published": "2008-12-16"
}
}
]

View File

@@ -3,8 +3,8 @@
"pk": 2,
"model": "multiple_database.book",
"fields": {
"title": "Pro Django",
"published": "2008-12-16"
"title": "Dive into Python",
"published": "2009-5-4"
}
}
]

View File

@@ -20,78 +20,78 @@ class QueryTestCase(TestCase):
def test_default_creation(self):
"Objects created on the default database don't leak onto other databases"
# Create a book on the default database using create()
Book.objects.create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
# Create a book on the default database using a save
pro = Book()
pro.title="Pro Django"
pro.published = datetime.date(2008, 12, 16)
pro.save()
dive = Book()
dive.title="Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save()
# Check that book exists on the default database, but not on the other database
try:
Book.objects.get(title="Dive into Python")
Book.objects.using('default').get(title="Dive into Python")
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Dive Into Python" should exist on default database')
self.assertRaises(Book.DoesNotExist,
Book.objects.using('other').get,
title="Dive into Python"
title="Pro Django"
)
try:
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
Book.objects.get(title="Dive into Python")
Book.objects.using('default').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on default database')
self.fail('"Dive into Python" should exist on default database')
self.assertRaises(Book.DoesNotExist,
Book.objects.using('other').get,
title="Pro Django"
title="Dive into Python"
)
def test_other_creation(self):
"Objects created on another database don't leak onto the default database"
# Create a book on the second database
Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
# Create a book on the default database using a save
pro = Book()
pro.title="Pro Django"
pro.published = datetime.date(2008, 12, 16)
pro.save(using='other')
dive = Book()
dive.title="Dive into Python"
dive.published = datetime.date(2009, 5, 4)
dive.save(using='other')
# Check that book exists on the other database, but not on the default database
try:
Book.objects.using('other').get(title="Dive into Python")
Book.objects.using('other').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Dive Into Python" should exist on other database')
self.assertRaises(Book.DoesNotExist,
Book.objects.get,
title="Dive into Python"
title="Pro Django"
)
self.assertRaises(Book.DoesNotExist,
Book.objects.using('default').get,
title="Dive into Python"
title="Pro Django"
)
try:
Book.objects.using('other').get(title="Pro Django")
Book.objects.using('other').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on other database')
self.fail('"Dive into Python" should exist on other database')
self.assertRaises(Book.DoesNotExist,
Book.objects.get,
title="Pro Django"
title="Dive into Python"
)
self.assertRaises(Book.DoesNotExist,
Book.objects.using('default').get,
title="Pro Django"
title="Dive into Python"
)
def test_basic_queries(self):
@@ -126,23 +126,23 @@ class QueryTestCase(TestCase):
months = Book.objects.using('default').dates('published', 'month')
self.assertEqual([o.month for o in months], [])
def test_m2m(self):
def test_m2m_separation(self):
"M2M fields are constrained to a single database"
# Create a book and author on the default database
dive = Book.objects.create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
mark = Author.objects.create(name="Mark Pilgrim")
marty = Author.objects.create(name="Marty Alchin")
# Create a book and author on the other database
pro = Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
marty = Author.objects.using('other').create(name="Marty Alchin")
mark = Author.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors = [mark]
pro.authors = [marty]
dive.authors = [mark]
# Inspect the m2m tables directly.
# There should be 1 entry in each database
@@ -150,59 +150,191 @@ class QueryTestCase(TestCase):
self.assertEquals(Book.authors.through.objects.using('other').count(), 1)
# Check that queries work across m2m joins
self.assertEquals(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True),
['Dive into Python'])
self.assertEquals(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True),
self.assertEquals(list(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
[u'Pro Django'])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
[])
self.assertEquals(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True),
self.assertEquals(list(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True),
['Pro Django'])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
def test_foreign_key(self):
def test_m2m_forward_operations(self):
"M2M forward manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Author.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors = [mark]
# Add a second author
john = Author.objects.using('other').create(name="John Smith")
self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[])
dive.authors.add(john)
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[u'Dive into Python'])
# Remove the second author
dive.authors.remove(john)
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[])
# Clear all authors
dive.authors.clear()
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
[])
# Create an author through the m2m interface
dive.authors.create(name='Jane Brown')
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(list(Book.objects.using('other').filter(authors__name='Jane Brown').values_list('title', flat=True)),
[u'Dive into Python'])
def test_m2m_reverse_operations(self):
"M2M reverse manipulations are all constrained to a single DB"
# Create a book and author on the other database
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Author.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
dive.authors = [mark]
# Create a second book on the other database
grease = Book.objects.using('other').create(title="Greasemonkey Hacks",
published=datetime.date(2005, 11, 1))
# Add a book to the m2m
mark.book_set.add(grease)
self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[u'Mark Pilgrim'])
self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
[u'Mark Pilgrim'])
# Remove a book from the m2m
mark.book_set.remove(grease)
self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[u'Mark Pilgrim'])
self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
[])
# Clear the books associated with mark
mark.book_set.clear()
self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[])
self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
[])
# Create a book through the m2m interface
mark.book_set.create(title="Dive into HTML5", published=datetime.date(2020, 1, 1))
self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
[])
self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into HTML5').values_list('name', flat=True)),
[u'Mark Pilgrim'])
def test_foreign_key_separation(self):
"FK fields are constrained to a single database"
# Create a book and author on the default database
dive = Book.objects.create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
pro = Book.objects.create(title="Pro Django",
published=datetime.date(2008, 12, 16))
mark = Author.objects.create(name="Mark Pilgrim")
marty = Author.objects.create(name="Marty Alchin")
# Create a book and author on the other database
pro = Book.objects.using('other').create(title="Pro Django",
published=datetime.date(2008, 12, 16))
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
marty = Author.objects.using('other').create(name="Marty Alchin")
mark = Author.objects.using('other').create(name="Mark Pilgrim")
# Save the author's favourite books
marty.favourite_book = pro
marty.save()
mark.favourite_book = dive
mark.save()
marty.favourite_book = pro
marty.save() # FIXME Should this be save(using=alias)?
marty = Author.objects.using('default').get(name="Marty Alchin")
self.assertEquals(marty.favourite_book.title, "Pro Django")
mark = Author.objects.using('default').get(name="Mark Pilgrim")
mark = Author.objects.using('other').get(name='Mark Pilgrim')
self.assertEquals(mark.favourite_book.title, "Dive into Python")
marty = Author.objects.using('other').get(name='Marty Alchin')
self.assertEquals(marty.favourite_book.title, "Dive into Python")
try:
mark.favourite_book = marty
marty.favourite_book = mark
self.fail("Shouldn't be able to assign across databases")
except Exception: # FIXME - this should be more explicit
pass
# Check that queries work across foreign key joins
self.assertEquals(Book.objects.using('default').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True),
['Dive into Python'])
self.assertEquals(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True),
self.assertEquals(list(Book.objects.using('default').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True)),
[u'Pro Django'])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True)),
[])
self.assertEquals(Book.objects.using('default').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True),
self.assertEquals(list(Book.objects.using('default').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(Book.objects.using('other').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True),
['Pro Django'])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
def test_foreign_key_reverse_operations(self):
"FK reverse manipulations are all constrained to a single DB"
dive = Book.objects.using('other').create(title="Dive into Python",
published=datetime.date(2009, 5, 4))
mark = Author.objects.using('other').create(name="Mark Pilgrim")
# Save the author relations
mark.favourite_book = dive
mark.save()
# Add a second author
john = Author.objects.using('other').create(name="John Smith")
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
[])
dive.favourite_of.add(john)
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
[u'Dive into Python'])
# Remove the second author
dive.favourite_of.remove(john)
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[u'Dive into Python'])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
[])
# Clear all favourite_of
dive.favourite_of.clear()
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
[])
# Create an author through the reverse FK interface
dive.favourite_of.create(name='Jane Brown')
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
[])
self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Jane Brown').values_list('title', flat=True)),
[u'Dive into Python'])
class FixtureTestCase(TestCase):
multi_db = True
@@ -210,31 +342,31 @@ class FixtureTestCase(TestCase):
def test_fixture_loading(self):
"Multi-db fixtures are loaded correctly"
# Check that "Dive into Python" exists on the default database, but not on other database
# Check that "Pro Django" exists on the default database, but not on other database
try:
Book.objects.get(title="Dive into Python")
Book.objects.using('default').get(title="Dive into Python")
Book.objects.get(title="Pro Django")
Book.objects.using('default').get(title="Pro Django")
except Book.DoesNotExist:
self.fail('"Dive Into Python" should exist on default database')
self.assertRaises(Book.DoesNotExist,
Book.objects.using('other').get,
title="Dive into Python"
title="Pro Django"
)
# Check that "Pro Django" exists on the default database, but not on other database
# Check that "Dive into Python" exists on the default database, but not on other database
try:
Book.objects.using('other').get(title="Pro Django")
Book.objects.using('other').get(title="Dive into Python")
except Book.DoesNotExist:
self.fail('"Pro Django" should exist on other database')
self.fail('"Dive into Python" should exist on other database')
self.assertRaises(Book.DoesNotExist,
Book.objects.get,
title="Pro Django"
title="Dive into Python"
)
self.assertRaises(Book.DoesNotExist,
Book.objects.using('default').get,
title="Pro Django"
title="Dive into Python"
)
# Check that "Definitive Guide" exists on the both databases
@@ -251,6 +383,6 @@ class PickleQuerySetTestCase(TestCase):
def test_pickling(self):
for db in connections:
Book.objects.using(db).create(title='Pro Django', published=datetime.date(2008, 12, 16))
Book.objects.using(db).create(title='Dive into Python', published=datetime.date(2009, 5, 4))
qs = Book.objects.all()
self.assertEqual(qs._using, pickle.loads(pickle.dumps(qs))._using)
self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)

View File

@@ -822,8 +822,8 @@ We can do slicing beyond what is currently in the result cache, too.
Bug #7045 -- extra tables used to crash SQL construction on the second use.
>>> qs = Ranking.objects.extra(tables=['django_site'])
>>> s = qs.query.get_compiler(qs._using).as_sql()
>>> s = qs.query.get_compiler(qs._using).as_sql() # test passes if this doesn't raise an exception.
>>> s = qs.query.get_compiler(qs.db).as_sql()
>>> s = qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception.
Bug #7098 -- Make sure semi-deprecated ordering by related models syntax still
works.
@@ -912,9 +912,9 @@ We should also be able to pickle things that use select_related(). The only
tricky thing here is to ensure that we do the related selections properly after
unpickling.
>>> qs = Item.objects.select_related()
>>> query = qs.query.get_compiler(qs._using).as_sql()[0]
>>> query = qs.query.get_compiler(qs.db).as_sql()[0]
>>> query2 = pickle.loads(pickle.dumps(qs.query))
>>> query2.get_compiler(qs._using).as_sql()[0] == query
>>> query2.get_compiler(qs.db).as_sql()[0] == query
True
Check pickling of deferred-loading querysets
@@ -1051,7 +1051,7 @@ sufficient that this query runs without error.
Calling order_by() with no parameters removes any existing ordering on the
model. But it should still be possible to add new ordering after that.
>>> qs = Author.objects.order_by().order_by('name')
>>> 'ORDER BY' in qs.query.get_compiler(qs._using).as_sql()[0]
>>> 'ORDER BY' in qs.query.get_compiler(qs.db).as_sql()[0]
True
Incorrect SQL was being generated for certain types of exclude() queries that
@@ -1086,7 +1086,7 @@ performance problems on backends like MySQL.
Nested queries should not evaluate the inner query as part of constructing the
SQL (so we should see a nested query here, indicated by two "SELECT" calls).
>>> qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
>>> qs.query.get_compiler(qs._using).as_sql()[0].count('SELECT')
>>> qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT')
2
Bug #10181 -- Avoid raising an EmptyResultSet if an inner query is provably