Mirror of https://github.com/django/django.git, synced 2025-07-05 10:19:20 +00:00
[soc2009/multidb] Made instances sticky to the database that created them.
This involves:

* Adding a _state attribute to instances to track instance state
* Making db a state attribute, and making the db a public attribute on querysets

Patch from Russell Keith-Magee.

git-svn-id: http://code.djangoproject.com/svn/django/branches/soc2009/multidb@11769 bcc190cf-cafb-0310-a4f2-bffc1f526a37
parent cec677a782
commit da909ae3a0
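What "sticky" means in practice — a minimal sketch, not part of the commit, assuming the Book model and the 'other' database alias used by this branch's multiple_database tests:

    # Creating an object through a QuerySet stamps the source database on the
    # instance: QuerySet.iterator() and create() now set obj._state.db.
    dive = Book.objects.using('other').create(title="Dive into Python",
                                              published=datetime.date(2009, 5, 4))
    assert dive._state.db == 'other'

    # A later save() or delete() with no explicit alias resolves the database
    # as: using or self._state.db or self._meta.using or DEFAULT_DB_ALIAS,
    # so the instance sticks to the database that created it.
    dive.title = "Dive into Python (updated)"
    dive.save()    # UPDATE runs against 'other', not 'default'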
TODO
@@ -11,14 +11,10 @@ Required for v1.2
 * Should we take the opportunity to modify DB backends to use fully qualified paths?
 * Should we clean up DATABASES['DATABASE_NAME'] to DATABASES['NAME'] etc?
 * Meta.using? Is is still required/desirable?
-* Fix the regressiontests/multiple_database test failures
-* Give instances knowledge of the database from which they were loaded.
-* Cascade instance using to m2m queries
 * Cleanup of new API entry points
 * validate() on a field
 * name/purpose clash with Honza?
 * any overlap with existing methods?
-* Accessing _using in BaseModelFormSet.
 
 Optional for v1.2
 ~~~~~~~~~~~~~~~~~
@@ -230,6 +230,13 @@ class ModelBase(type):
 
         signals.class_prepared.send(sender=cls)
 
+class ModelState(object):
+    """
+    A class for storing instance state
+    """
+    def __init__(self, db=None):
+        self.db = db
+
 class Model(object):
     __metaclass__ = ModelBase
     _deferred = False
@@ -237,6 +244,9 @@ class Model(object):
     def __init__(self, *args, **kwargs):
         signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)
 
+        # Set up the storage for instane state
+        self._state = ModelState()
+
         # There is a rather weird disparity here; if kwargs, it's set, then args
         # overrides it. It should be one or the other; don't duplicate the work
         # The reason for the kwargs check is that standard iterator passes in by
@@ -428,7 +438,7 @@ class Model(object):
         need for overrides of save() to pass around internal-only parameters
         ('raw', 'cls', and 'origin').
         """
-        using = using or self._meta.using or DEFAULT_DB_ALIAS
+        using = using or self._state.db or self._meta.using or DEFAULT_DB_ALIAS
         connection = connections[using]
         assert not (force_insert and force_update)
         if cls is None:
@@ -514,6 +524,10 @@ class Model(object):
                 setattr(self, meta.pk.attname, result)
         transaction.commit_unless_managed(using=using)
 
+        # Store the database on which the object was saved
+        self._state.db = using
+
+        # Signal that the save is complete
         if origin and not meta.auto_created:
             signals.post_save.send(sender=origin, instance=self,
                 created=(not record_exists), raw=raw)
@@ -577,7 +591,7 @@ class Model(object):
                 parent_obj._collect_sub_objects(seen_objs)
 
     def delete(self, using=None):
-        using = using or self._meta.using or DEFAULT_DB_ALIAS
+        using = using or self._state.db or self._meta.using or DEFAULT_DB_ALIAS
        connection = connections[using]
         assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
 
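A note on the save()/delete() changes above: an explicit using argument still wins over the sticky database. A hedged sketch (aliases and model are illustrative, not from the commit):

    book = Book.objects.using('other').get(title="Dive into Python")
    book.save(using='default')          # explicit alias overrides _state.db
    assert book._state.db == 'default'  # save_base() re-stamps the instance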
@@ -195,7 +195,7 @@ class SingleRelatedObjectDescriptor(object):
             return getattr(instance, self.cache_name)
         except AttributeError:
             params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
-            rel_obj = self.related.model._base_manager.get(**params)
+            rel_obj = self.related.model._base_manager.using(instance._state.db).get(**params)
             setattr(instance, self.cache_name, rel_obj)
             return rel_obj
 
@@ -259,9 +259,9 @@ class ReverseSingleRelatedObjectDescriptor(object):
             # related fields, respect that.
             rel_mgr = self.field.rel.to._default_manager
             if getattr(rel_mgr, 'use_for_related_fields', False):
-                rel_obj = rel_mgr.get(**params)
+                rel_obj = rel_mgr.using(instance._state.db).get(**params)
             else:
-                rel_obj = QuerySet(self.field.rel.to).get(**params)
+                rel_obj = QuerySet(self.field.rel.to).using(instance._state.db).get(**params)
             setattr(instance, cache_name, rel_obj)
             return rel_obj
 
@@ -359,14 +359,14 @@ class ForeignRelatedObjectsDescriptor(object):
 
         class RelatedManager(superclass):
             def get_query_set(self):
-                return superclass.get_query_set(self).filter(**(self.core_filters))
+                return superclass.get_query_set(self).using(instance._state.db).filter(**(self.core_filters))
 
             def add(self, *objs):
                 for obj in objs:
                     if not isinstance(obj, self.model):
                         raise TypeError, "'%s' instance expected" % self.model._meta.object_name
                     setattr(obj, rel_field.name, instance)
-                    obj.save()
+                    obj.save(using=instance._state.db)
             add.alters_data = True
 
             def create(self, **kwargs):
@@ -378,7 +378,7 @@ class ForeignRelatedObjectsDescriptor(object):
                 # Update kwargs with the related object that this
                 # ForeignRelatedObjectsDescriptor knows about.
                 kwargs.update({rel_field.name: instance})
-                return super(RelatedManager, self).get_or_create(**kwargs)
+                return super(RelatedManager, self).using(instance._state.db).get_or_create(**kwargs)
             get_or_create.alters_data = True
 
             # remove() and clear() are only provided if the ForeignKey can have a value of null.
@@ -389,7 +389,7 @@ class ForeignRelatedObjectsDescriptor(object):
                     # Is obj actually part of this descriptor set?
                     if getattr(obj, rel_field.attname) == val:
                         setattr(obj, rel_field.name, None)
-                        obj.save()
+                        obj.save(using=instance._state.db)
                     else:
                         raise rel_field.rel.to.DoesNotExist, "%r is not related to %r." % (obj, instance)
             remove.alters_data = True
@@ -397,7 +397,7 @@ class ForeignRelatedObjectsDescriptor(object):
             def clear(self):
                 for obj in self.all():
                     setattr(obj, rel_field.name, None)
-                    obj.save()
+                    obj.save(using=instance._state.db)
             clear.alters_data = True
 
         manager = RelatedManager()
@@ -463,14 +463,14 @@ def create_many_related_manager(superclass, rel=False):
             if not rel.through._meta.auto_created:
                 opts = through._meta
                 raise AttributeError, "Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name)
-            new_obj = super(ManyRelatedManager, self).create(**kwargs)
+            new_obj = super(ManyRelatedManager, self).using(self.instance._state.db).create(**kwargs)
             self.add(new_obj)
             return new_obj
         create.alters_data = True
 
         def get_or_create(self, **kwargs):
             obj, created = \
-                super(ManyRelatedManager, self).get_or_create(**kwargs)
+                super(ManyRelatedManager, self).using(self.instance._state.db).get_or_create(**kwargs)
             # We only need to add() if created because if we got an object back
             # from get() then the relationship already exists.
             if created:
@@ -495,7 +495,7 @@ def create_many_related_manager(superclass, rel=False):
                         raise TypeError, "'%s' instance expected" % self.model._meta.object_name
                     else:
                         new_ids.add(obj)
-                vals = self.through._default_manager.values_list(target_field_name, flat=True)
+                vals = self.through._default_manager.using(self.instance._state.db).values_list(target_field_name, flat=True)
                 vals = vals.filter(**{
                     source_field_name: self._pk_val,
                     '%s__in' % target_field_name: new_ids,
@@ -504,7 +504,7 @@ def create_many_related_manager(superclass, rel=False):
 
                 # Add the ones that aren't there already
                 for obj_id in (new_ids - vals):
-                    self.through._default_manager.create(**{
+                    self.through._default_manager.using(self.instance._state.db).create(**{
                         '%s_id' % source_field_name: self._pk_val,
                         '%s_id' % target_field_name: obj_id,
                     })
@@ -524,14 +524,14 @@ def create_many_related_manager(superclass, rel=False):
                     else:
                         old_ids.add(obj)
                 # Remove the specified objects from the join table
-                self.through._default_manager.filter(**{
+                self.through._default_manager.using(self.instance._state.db).filter(**{
                     source_field_name: self._pk_val,
                     '%s__in' % target_field_name: old_ids
                 }).delete()
 
         def _clear_items(self, source_field_name):
             # source_col_name: the PK colname in join_table for the source object
-            self.through._default_manager.filter(**{
+            self.through._default_manager.using(self.instance._state.db).filter(**{
                 source_field_name: self._pk_val
             }).delete()
 
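Taken together, the related-descriptor changes above make relation traversal and mutation follow instance._state.db instead of silently falling back to the default alias. A minimal sketch, again borrowing the branch's test models:

    mark = Author.objects.using('other').get(name="Mark Pilgrim")
    mark.favourite_book       # forward FK descriptor now queries 'other'

    dive = Book.objects.using('other').get(title="Dive into Python")
    dive.authors.add(mark)    # m2m join row is written to 'other'
    dive.authors.create(name='Jane Brown')   # created on 'other' as well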
@@ -38,7 +38,7 @@ class QuerySet(object):
         self._result_cache = None
         self._iter = None
         self._sticky_filter = False
-        self._using = using
+        self.db = using
 
     ########################
     # PYTHON MAGIC METHODS #
@@ -237,7 +237,7 @@ class QuerySet(object):
                 init_list.append(field.attname)
             model_cls = deferred_class_factory(self.model, skip)
 
-        compiler = self.query.get_compiler(using=self._using)
+        compiler = self.query.get_compiler(using=self.db)
         for row in compiler.results_iter():
             if fill_cache:
                 obj, _ = get_cached_row(self.model, row,
@@ -260,6 +260,9 @@ class QuerySet(object):
                 for i, aggregate in enumerate(aggregate_select):
                     setattr(obj, aggregate, row[i+aggregate_start])
 
+            # Store the source database of the object
+            obj._state.db = self.db
+
             yield obj
 
     def aggregate(self, *args, **kwargs):
@@ -279,7 +282,7 @@ class QuerySet(object):
             query.add_aggregate(aggregate_expr, self.model, alias,
                 is_summary=True)
 
-        return query.get_aggregation(using=self._using)
+        return query.get_aggregation(using=self.db)
 
     def count(self):
         """
@@ -292,7 +295,7 @@ class QuerySet(object):
         if self._result_cache is not None and not self._iter:
             return len(self._result_cache)
 
-        return self.query.get_count(using=self._using)
+        return self.query.get_count(using=self.db)
 
     def get(self, *args, **kwargs):
         """
@@ -315,7 +318,7 @@ class QuerySet(object):
         and returning the created object.
         """
         obj = self.model(**kwargs)
-        obj.save(force_insert=True, using=self._using)
+        obj.save(force_insert=True, using=self.db)
         return obj
 
     def get_or_create(self, **kwargs):
@@ -334,12 +337,12 @@ class QuerySet(object):
             params = dict([(k, v) for k, v in kwargs.items() if '__' not in k])
             params.update(defaults)
             obj = self.model(**params)
-            sid = transaction.savepoint(using=self._using)
-            obj.save(force_insert=True, using=self._using)
-            transaction.savepoint_commit(sid, using=self._using)
+            sid = transaction.savepoint(using=self.db)
+            obj.save(force_insert=True, using=self.db)
+            transaction.savepoint_commit(sid, using=self.db)
             return obj, True
         except IntegrityError, e:
-            transaction.savepoint_rollback(sid, using=self._using)
+            transaction.savepoint_rollback(sid, using=self.db)
             try:
                 return self.get(**kwargs), False
             except self.model.DoesNotExist:
@@ -399,7 +402,7 @@ class QuerySet(object):
 
             if not seen_objs:
                 break
-            delete_objects(seen_objs, del_query._using)
+            delete_objects(seen_objs, del_query.db)
 
         # Clear the result cache, in case this QuerySet gets reused.
         self._result_cache = None
@@ -414,20 +417,20 @@ class QuerySet(object):
                 "Cannot update a query once a slice has been taken."
         query = self.query.clone(sql.UpdateQuery)
         query.add_update_values(kwargs)
-        if not transaction.is_managed(using=self._using):
-            transaction.enter_transaction_management(using=self._using)
+        if not transaction.is_managed(using=self.db):
+            transaction.enter_transaction_management(using=self.db)
             forced_managed = True
         else:
             forced_managed = False
         try:
-            rows = query.get_compiler(self._using).execute_sql(None)
+            rows = query.get_compiler(self.db).execute_sql(None)
             if forced_managed:
-                transaction.commit(using=self._using)
+                transaction.commit(using=self.db)
             else:
-                transaction.commit_unless_managed(using=self._using)
+                transaction.commit_unless_managed(using=self.db)
         finally:
             if forced_managed:
-                transaction.leave_transaction_management(using=self._using)
+                transaction.leave_transaction_management(using=self.db)
         self._result_cache = None
         return rows
     update.alters_data = True
@@ -444,12 +447,12 @@ class QuerySet(object):
         query = self.query.clone(sql.UpdateQuery)
         query.add_update_fields(values)
         self._result_cache = None
-        return query.get_compiler(self._using).execute_sql(None)
+        return query.get_compiler(self.db).execute_sql(None)
     _update.alters_data = True
 
     def exists(self):
         if self._result_cache is None:
-            return self.query.has_results(using=self._using)
+            return self.query.has_results(using=self.db)
         return bool(self._result_cache)
 
     ##################################################
@@ -661,7 +664,7 @@ class QuerySet(object):
         Selects which database this QuerySet should excecute it's query against.
         """
         clone = self._clone()
-        clone._using = alias
+        clone.db = alias
         return clone
 
     ###################################
@@ -692,7 +695,7 @@ class QuerySet(object):
         if self._sticky_filter:
             query.filter_is_sticky = True
         c = klass(model=self.model, query=query)
-        c._using = self._using
+        c.db = self.db
         c.__dict__.update(kwargs)
         if setup and hasattr(c, '_setup_query'):
             c._setup_query()
@@ -747,7 +750,7 @@ class QuerySet(object):
         Returns the internal query's SQL and parameters (as a tuple).
         """
         obj = self.values("pk")
-        if connection == connections[obj._using]:
+        if connection == connections[obj.db]:
             return obj.query.get_compiler(connection=connection).as_nested_sql()
         raise ValueError("Can't do subqueries with queries on different DBs.")
 
@@ -779,7 +782,7 @@ class ValuesQuerySet(QuerySet):
 
         names = extra_names + field_names + aggregate_names
 
-        for row in self.query.get_compiler(self._using).results_iter():
+        for row in self.query.get_compiler(self.db).results_iter():
             yield dict(zip(names, row))
 
     def _setup_query(self):
@@ -876,7 +879,7 @@ class ValuesQuerySet(QuerySet):
                              % self.__class__.__name__)
 
         obj = self._clone()
-        if connection == connections[obj._using]:
+        if connection == connections[obj.db]:
             return obj.query.get_compiler(connection=connection).as_nested_sql()
         raise ValueError("Can't do subqueries with queries on different DBs.")
 
@@ -894,10 +897,10 @@ class ValuesQuerySet(QuerySet):
 class ValuesListQuerySet(ValuesQuerySet):
     def iterator(self):
         if self.flat and len(self._fields) == 1:
-            for row in self.query.get_compiler(self._using).results_iter():
+            for row in self.query.get_compiler(self.db).results_iter():
                 yield row[0]
         elif not self.query.extra_select and not self.query.aggregate_select:
-            for row in self.query.get_compiler(self._using).results_iter():
+            for row in self.query.get_compiler(self.db).results_iter():
                 yield tuple(row)
         else:
             # When extra(select=...) or an annotation is involved, the extra
@@ -916,7 +919,7 @@ class ValuesListQuerySet(ValuesQuerySet):
             else:
                 fields = names
 
-            for row in self.query.get_compiler(self._using).results_iter():
+            for row in self.query.get_compiler(self.db).results_iter():
                 data = dict(zip(names, row))
                 yield tuple([data[f] for f in fields])
 
@@ -928,7 +931,7 @@ class ValuesListQuerySet(ValuesQuerySet):
 
 class DateQuerySet(QuerySet):
     def iterator(self):
-        return self.query.get_compiler(self._using).results_iter()
+        return self.query.get_compiler(self.db).results_iter()
 
     def _setup_query(self):
         """
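With db now a public attribute on QuerySet (replacing the private _using), callers no longer reach into internals; the formset and doctest updates below rely on exactly this. A small sketch under the same assumed test models:

    qs = Book.objects.using('other').filter(title__icontains='python')
    assert qs.db == 'other'
    sql, params = qs.query.get_compiler(qs.db).as_sql()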
@@ -472,7 +472,7 @@ class BaseModelFormSet(BaseFormSet):
             pk = self.data[pk_key]
             pk_field = self.model._meta.pk
             pk = pk_field.get_db_prep_lookup('exact', pk,
-                connection=connections[self.get_queryset()._using])
+                connection=connections[self.get_queryset().db])
             if isinstance(pk, list):
                 pk = pk[0]
             kwargs['instance'] = self._existing_object(pk)
@@ -157,7 +157,7 @@ False
 
 # The underlying query only makes one join when a related table is referenced twice.
 >>> queryset = Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
->>> sql = queryset.query.get_compiler(queryset._using).as_sql()[0]
+>>> sql = queryset.query.get_compiler(queryset.db).as_sql()[0]
 >>> sql.count('INNER JOIN')
 1
 
@@ -250,10 +250,10 @@ FieldError: Cannot resolve keyword 'foo' into field. Choices are: authors, conta
 >>> out = pickle.dumps(qs)
 
 # Then check that the round trip works.
->>> query = qs.query.get_compiler(qs._using).as_sql()[0]
+>>> query = qs.query.get_compiler(qs.db).as_sql()[0]
 >>> select_fields = qs.query.select_fields
 >>> query2 = pickle.loads(pickle.dumps(qs))
->>> query2.query.get_compiler(query2._using).as_sql()[0] == query
+>>> query2.query.get_compiler(query2.db).as_sql()[0] == query
 True
 >>> query2.query.select_fields = select_fields
 
@@ -315,7 +315,7 @@ DoesNotExist: ArticleWithAuthor matching query does not exist.
 # Regression test for #9390. This necessarily pokes at the SQL string for the
 # query, since the duplicate problems are only apparent at that late stage.
 >>> qs = ArticleWithAuthor.objects.order_by('pub_date', 'pk')
->>> sql = qs.query.get_compiler(qs._using).as_sql()[0]
+>>> sql = qs.query.get_compiler(qs.db).as_sql()[0]
 >>> fragment = sql[sql.find('ORDER BY'):]
 >>> pos = fragment.find('pub_date')
 >>> fragment.find('pub_date', pos + 1) == -1
@@ -3,8 +3,8 @@
         "pk": 2,
         "model": "multiple_database.book",
         "fields": {
-            "title": "Dive into Python",
-            "published": "2009-5-4"
+            "title": "Pro Django",
+            "published": "2008-12-16"
         }
     }
 ]
@@ -3,8 +3,8 @@
         "pk": 2,
         "model": "multiple_database.book",
         "fields": {
-            "title": "Pro Django",
-            "published": "2008-12-16"
+            "title": "Dive into Python",
+            "published": "2009-5-4"
         }
     }
 ]
@@ -20,78 +20,78 @@ class QueryTestCase(TestCase):
     def test_default_creation(self):
         "Objects created on the default database don't leak onto other databases"
         # Create a book on the default database using create()
-        Book.objects.create(title="Dive into Python",
-                            published=datetime.date(2009, 5, 4))
+        Book.objects.create(title="Pro Django",
+                            published=datetime.date(2008, 12, 16))
 
         # Create a book on the default database using a save
-        pro = Book()
-        pro.title="Pro Django"
-        pro.published = datetime.date(2008, 12, 16)
-        pro.save()
+        dive = Book()
+        dive.title="Dive into Python"
+        dive.published = datetime.date(2009, 5, 4)
+        dive.save()
 
         # Check that book exists on the default database, but not on other database
         try:
-            Book.objects.get(title="Dive into Python")
-            Book.objects.using('default').get(title="Dive into Python")
+            Book.objects.get(title="Pro Django")
+            Book.objects.using('default').get(title="Pro Django")
         except Book.DoesNotExist:
             self.fail('"Dive Into Python" should exist on default database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.using('other').get,
-            title="Dive into Python"
+            title="Pro Django"
         )
 
         try:
-            Book.objects.get(title="Pro Django")
-            Book.objects.using('default').get(title="Pro Django")
+            Book.objects.get(title="Dive into Python")
+            Book.objects.using('default').get(title="Dive into Python")
         except Book.DoesNotExist:
-            self.fail('"Pro Django" should exist on default database')
+            self.fail('"Dive into Python" should exist on default database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.using('other').get,
-            title="Pro Django"
+            title="Dive into Python"
         )
 
 
     def test_other_creation(self):
         "Objects created on another database don't leak onto the default database"
         # Create a book on the second database
-        Book.objects.using('other').create(title="Dive into Python",
-                                           published=datetime.date(2009, 5, 4))
+        Book.objects.using('other').create(title="Pro Django",
+                                           published=datetime.date(2008, 12, 16))
 
         # Create a book on the default database using a save
-        pro = Book()
-        pro.title="Pro Django"
-        pro.published = datetime.date(2008, 12, 16)
-        pro.save(using='other')
+        dive = Book()
+        dive.title="Dive into Python"
+        dive.published = datetime.date(2009, 5, 4)
+        dive.save(using='other')
 
         # Check that book exists on the default database, but not on other database
         try:
-            Book.objects.using('other').get(title="Dive into Python")
+            Book.objects.using('other').get(title="Pro Django")
         except Book.DoesNotExist:
             self.fail('"Dive Into Python" should exist on other database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.get,
-            title="Dive into Python"
+            title="Pro Django"
         )
         self.assertRaises(Book.DoesNotExist,
            Book.objects.using('default').get,
-            title="Dive into Python"
+            title="Pro Django"
         )
 
         try:
-            Book.objects.using('other').get(title="Pro Django")
+            Book.objects.using('other').get(title="Dive into Python")
         except Book.DoesNotExist:
-            self.fail('"Pro Django" should exist on other database')
+            self.fail('"Dive into Python" should exist on other database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.get,
-            title="Pro Django"
+            title="Dive into Python"
         )
         self.assertRaises(Book.DoesNotExist,
             Book.objects.using('default').get,
-            title="Pro Django"
+            title="Dive into Python"
         )
 
     def test_basic_queries(self):
@@ -126,23 +126,23 @@ class QueryTestCase(TestCase):
         months = Book.objects.using('default').dates('published', 'month')
         self.assertEqual([o.month for o in months], [])
 
-    def test_m2m(self):
+    def test_m2m_separation(self):
         "M2M fields are constrained to a single database"
         # Create a book and author on the default database
-        dive = Book.objects.create(title="Dive into Python",
-                                   published=datetime.date(2009, 5, 4))
+        pro = Book.objects.create(title="Pro Django",
+                                  published=datetime.date(2008, 12, 16))
 
-        mark = Author.objects.create(name="Mark Pilgrim")
+        marty = Author.objects.create(name="Marty Alchin")
 
         # Create a book and author on the other database
-        pro = Book.objects.using('other').create(title="Pro Django",
-                                                 published=datetime.date(2008, 12, 16))
+        dive = Book.objects.using('other').create(title="Dive into Python",
+                                                  published=datetime.date(2009, 5, 4))
 
-        marty = Author.objects.using('other').create(name="Marty Alchin")
+        mark = Author.objects.using('other').create(name="Mark Pilgrim")
 
         # Save the author relations
-        dive.authors = [mark]
         pro.authors = [marty]
+        dive.authors = [mark]
 
         # Inspect the m2m tables directly.
         # There should be 1 entry in each database
@@ -150,59 +150,191 @@ class QueryTestCase(TestCase):
         self.assertEquals(Book.authors.through.objects.using('other').count(), 1)
 
         # Check that queries work across m2m joins
-        self.assertEquals(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True),
-                          ['Dive into Python'])
-        self.assertEquals(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True),
+        self.assertEquals(list(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
+                          [u'Pro Django'])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True)),
                           [])
 
-        self.assertEquals(Book.objects.using('default').filter(authors__name='Marty Alchin').values_list('title', flat=True),
+        self.assertEquals(list(Book.objects.using('default').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
                           [])
-        self.assertEquals(Book.objects.using('other').filter(authors__name='Marty Alchin').values_list('title', flat=True),
-                          ['Pro Django'])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
 
-    def test_foreign_key(self):
+    def test_m2m_forward_operations(self):
+        "M2M forward manipulations are all constrained to a single DB"
+        # Create a book and author on the other database
+        dive = Book.objects.using('other').create(title="Dive into Python",
+                                                  published=datetime.date(2009, 5, 4))
+
+        mark = Author.objects.using('other').create(name="Mark Pilgrim")
+
+        # Save the author relations
+        dive.authors = [mark]
+
+        # Add a second author
+        john = Author.objects.using('other').create(name="John Smith")
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+
+        dive.authors.add(john)
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+
+        # Remove the second author
+        dive.authors.remove(john)
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+        # Clear all authors
+        dive.authors.clear()
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+        # Create an author through the m2m interface
+        dive.authors.create(name='Jane Brown')
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [])
+        self.assertEquals(list(Book.objects.using('other').filter(authors__name='Jane Brown').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+
+    def test_m2m_reverse_operations(self):
+        "M2M reverse manipulations are all constrained to a single DB"
+        # Create a book and author on the other database
+        dive = Book.objects.using('other').create(title="Dive into Python",
+                                                  published=datetime.date(2009, 5, 4))
+
+        mark = Author.objects.using('other').create(name="Mark Pilgrim")
+
+        # Save the author relations
+        dive.authors = [mark]
+
+        # Create a second book on the other database
+        grease = Book.objects.using('other').create(title="Greasemonkey Hacks",
+                                                    published=datetime.date(2005, 11, 1))
+
+        # Add a books to the m2m
+        mark.book_set.add(grease)
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
+                          [u'Mark Pilgrim'])
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
+                          [u'Mark Pilgrim'])
+
+        # Remove a book from the m2m
+        mark.book_set.remove(grease)
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
+                          [u'Mark Pilgrim'])
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
+                          [])
+
+        # Clear the books associated with mark
+        mark.book_set.clear()
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
+                          [])
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Greasemonkey Hacks').values_list('name', flat=True)),
+                          [])
+
+        # Create a book through the m2m interface
+        mark.book_set.create(title="Dive into HTML5", published=datetime.date(2020, 1, 1))
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into Python').values_list('name', flat=True)),
+                          [])
+        self.assertEquals(list(Author.objects.using('other').filter(book__title='Dive into HTML5').values_list('name', flat=True)),
+                          [u'Mark Pilgrim'])
+
+
+    def test_foreign_key_separation(self):
         "FK fields are constrained to a single database"
         # Create a book and author on the default database
-        dive = Book.objects.create(title="Dive into Python",
-                                   published=datetime.date(2009, 5, 4))
+        pro = Book.objects.create(title="Pro Django",
+                                  published=datetime.date(2008, 12, 16))
 
-        mark = Author.objects.create(name="Mark Pilgrim")
+        marty = Author.objects.create(name="Marty Alchin")
 
         # Create a book and author on the other database
-        pro = Book.objects.using('other').create(title="Pro Django",
-                                                 published=datetime.date(2008, 12, 16))
+        dive = Book.objects.using('other').create(title="Dive into Python",
+                                                  published=datetime.date(2009, 5, 4))
 
-        marty = Author.objects.using('other').create(name="Marty Alchin")
+        mark = Author.objects.using('other').create(name="Mark Pilgrim")
 
         # Save the author's favourite books
+        marty.favourite_book = pro
+        marty.save()
+
         mark.favourite_book = dive
         mark.save()
 
-        marty.favourite_book = pro
-        marty.save() # FIXME Should this be save(using=alias)?
+        marty = Author.objects.using('default').get(name="Marty Alchin")
+        self.assertEquals(marty.favourite_book.title, "Pro Django")
 
-        mark = Author.objects.using('default').get(name="Mark Pilgrim")
+        mark = Author.objects.using('other').get(name='Mark Pilgrim')
         self.assertEquals(mark.favourite_book.title, "Dive into Python")
 
-        marty = Author.objects.using('other').get(name='Marty Alchin')
-        self.assertEquals(marty.favourite_book.title, "Dive into Python")
 
         try:
-            mark.favourite_book = marty
+            marty.favourite_book = mark
             self.fail("Shouldn't be able to assign across databases")
         except Exception: # FIXME - this should be more explicit
             pass
 
         # Check that queries work across foreign key joins
-        self.assertEquals(Book.objects.using('default').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True),
-                          ['Dive into Python'])
-        self.assertEquals(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True),
+        self.assertEquals(list(Book.objects.using('default').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True)),
+                          [u'Pro Django'])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True)),
                           [])
 
-        self.assertEquals(Book.objects.using('default').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True),
+        self.assertEquals(list(Book.objects.using('default').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
                           [])
-        self.assertEquals(Book.objects.using('other').filter(favourite_of__name='Marty Alchin').values_list('title', flat=True),
-                          ['Pro Django'])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
 
+    def test_foreign_key_reverse_operations(self):
+        "FK reverse manipulations are all constrained to a single DB"
+        dive = Book.objects.using('other').create(title="Dive into Python",
+                                                  published=datetime.date(2009, 5, 4))
+
+        mark = Author.objects.using('other').create(name="Mark Pilgrim")
+
+        # Save the author relations
+        mark.favourite_book = dive
+        mark.save()
+
+        # Add a second author
+        john = Author.objects.using('other').create(name="John Smith")
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+
+        dive.favourite_of.add(john)
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+
+        # Remove the second author
+        dive.favourite_of.remove(john)
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+        # Clear all favourite_of
+        dive.favourite_of.clear()
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='John Smith').values_list('title', flat=True)),
+                          [])
+
+        # Create an author through the m2m interface
+        dive.favourite_of.create(name='Jane Brown')
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Mark Pilgrim').values_list('title', flat=True)),
+                          [])
+        self.assertEquals(list(Book.objects.using('other').filter(favourite_of__name='Jane Brown').values_list('title', flat=True)),
+                          [u'Dive into Python'])
+
 class FixtureTestCase(TestCase):
     multi_db = True
@@ -210,31 +342,31 @@ class FixtureTestCase(TestCase):
 
     def test_fixture_loading(self):
         "Multi-db fixtures are loaded correctly"
-        # Check that "Dive into Python" exists on the default database, but not on other database
+        # Check that "Pro Django" exists on the default database, but not on other database
         try:
-            Book.objects.get(title="Dive into Python")
-            Book.objects.using('default').get(title="Dive into Python")
+            Book.objects.get(title="Pro Django")
+            Book.objects.using('default').get(title="Pro Django")
         except Book.DoesNotExist:
             self.fail('"Dive Into Python" should exist on default database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.using('other').get,
-            title="Dive into Python"
+            title="Pro Django"
         )
 
-        # Check that "Pro Django" exists on the default database, but not on other database
+        # Check that "Dive into Python" exists on the default database, but not on other database
         try:
-            Book.objects.using('other').get(title="Pro Django")
+            Book.objects.using('other').get(title="Dive into Python")
         except Book.DoesNotExist:
-            self.fail('"Pro Django" should exist on other database')
+            self.fail('"Dive into Python" should exist on other database')
 
         self.assertRaises(Book.DoesNotExist,
             Book.objects.get,
-            title="Pro Django"
+            title="Dive into Python"
         )
         self.assertRaises(Book.DoesNotExist,
             Book.objects.using('default').get,
-            title="Pro Django"
+            title="Dive into Python"
         )
 
         # Check that "Definitive Guide" exists on the both databases
@@ -251,6 +383,6 @@ class PickleQuerySetTestCase(TestCase):
 
     def test_pickling(self):
         for db in connections:
-            Book.objects.using(db).create(title='Pro Django', published=datetime.date(2008, 12, 16))
+            Book.objects.using(db).create(title='Dive into Python', published=datetime.date(2009, 5, 4))
             qs = Book.objects.all()
-            self.assertEqual(qs._using, pickle.loads(pickle.dumps(qs))._using)
+            self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)
@@ -822,8 +822,8 @@ We can do slicing beyond what is currently in the result cache, too.
 
 Bug #7045 -- extra tables used to crash SQL construction on the second use.
 >>> qs = Ranking.objects.extra(tables=['django_site'])
->>> s = qs.query.get_compiler(qs._using).as_sql()
->>> s = qs.query.get_compiler(qs._using).as_sql() # test passes if this doesn't raise an exception.
+>>> s = qs.query.get_compiler(qs.db).as_sql()
+>>> s = qs.query.get_compiler(qs.db).as_sql() # test passes if this doesn't raise an exception.
 
 Bug #7098 -- Make sure semi-deprecated ordering by related models syntax still
 works.
@@ -912,9 +912,9 @@ We should also be able to pickle things that use select_related(). The only
 tricky thing here is to ensure that we do the related selections properly after
 unpickling.
 >>> qs = Item.objects.select_related()
->>> query = qs.query.get_compiler(qs._using).as_sql()[0]
+>>> query = qs.query.get_compiler(qs.db).as_sql()[0]
 >>> query2 = pickle.loads(pickle.dumps(qs.query))
->>> query2.get_compiler(qs._using).as_sql()[0] == query
+>>> query2.get_compiler(qs.db).as_sql()[0] == query
 True
 
 Check pickling of deferred-loading querysets
@@ -1051,7 +1051,7 @@ sufficient that this query runs without error.
 Calling order_by() with no parameters removes any existing ordering on the
 model. But it should still be possible to add new ordering after that.
 >>> qs = Author.objects.order_by().order_by('name')
->>> 'ORDER BY' in qs.query.get_compiler(qs._using).as_sql()[0]
+>>> 'ORDER BY' in qs.query.get_compiler(qs.db).as_sql()[0]
 True
 
 Incorrect SQL was being generated for certain types of exclude() queries that
@@ -1086,7 +1086,7 @@ performance problems on backends like MySQL.
 Nested queries should not evaluate the inner query as part of constructing the
 SQL (so we should see a nested query here, indicated by two "SELECT" calls).
 >>> qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
->>> qs.query.get_compiler(qs._using).as_sql()[0].count('SELECT')
+>>> qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT')
 2
 
 Bug #10181 -- Avoid raising an EmptyResultSet if an inner query is provably