1
0
mirror of https://github.com/django/django.git synced 2025-07-04 17:59:13 +00:00

[soc2010/test-refactor] Merged up to trunk again to fix failing tests.

git-svn-id: http://code.djangoproject.com/svn/django/branches/soc2010/test-refactor@13478 bcc190cf-cafb-0310-a4f2-bffc1f526a37
This commit is contained in:
Paul McMillan 2010-08-05 04:30:44 +00:00
parent 0620a1c223
commit 6c7baa96d8
9 changed files with 182 additions and 45 deletions

View File

@ -1,7 +1,7 @@
"Database cache backend." "Database cache backend."
from django.core.cache.backends.base import BaseCache from django.core.cache.backends.base import BaseCache
from django.db import connection, transaction, DatabaseError from django.db import connections, router, transaction, DatabaseError
import base64, time import base64, time
from datetime import datetime from datetime import datetime
try: try:
@ -9,10 +9,31 @@ try:
except ImportError: except ImportError:
import pickle import pickle
class Options(object):
"""A class that will quack like a Django model _meta class.
This allows cache operations to be controlled by the router
"""
def __init__(self, table):
self.db_table = table
self.app_label = 'django_cache'
self.module_name = 'cacheentry'
self.verbose_name = 'cache entry'
self.verbose_name_plural = 'cache entries'
self.object_name = 'CacheEntry'
self.abstract = False
self.managed = True
self.proxy = False
class CacheClass(BaseCache): class CacheClass(BaseCache):
def __init__(self, table, params): def __init__(self, table, params):
BaseCache.__init__(self, params) BaseCache.__init__(self, params)
self._table = connection.ops.quote_name(table) self._table = table
class CacheEntry(object):
_meta = Options(table)
self.cache_model_class = CacheEntry
max_entries = params.get('max_entries', 300) max_entries = params.get('max_entries', 300)
try: try:
self._max_entries = int(max_entries) self._max_entries = int(max_entries)
@ -25,17 +46,22 @@ class CacheClass(BaseCache):
self._cull_frequency = 3 self._cull_frequency = 3
def get(self, key, default=None): def get(self, key, default=None):
cursor = connection.cursor() db = router.db_for_read(self.cache_model_class)
cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key]) table = connections[db].ops.quote_name(self._table)
cursor = connections[db].cursor()
cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
row = cursor.fetchone() row = cursor.fetchone()
if row is None: if row is None:
return default return default
now = datetime.now() now = datetime.now()
if row[2] < now: if row[2] < now:
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key]) db = router.db_for_write(self.cache_model_class)
transaction.commit_unless_managed() cursor = connections[db].cursor()
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
transaction.commit_unless_managed(using=db)
return default return default
value = connection.ops.process_clob(row[1]) value = connections[db].ops.process_clob(row[1])
return pickle.loads(base64.decodestring(value)) return pickle.loads(base64.decodestring(value))
def set(self, key, value, timeout=None): def set(self, key, value, timeout=None):
@ -47,56 +73,67 @@ class CacheClass(BaseCache):
def _base_set(self, mode, key, value, timeout=None): def _base_set(self, mode, key, value, timeout=None):
if timeout is None: if timeout is None:
timeout = self.default_timeout timeout = self.default_timeout
cursor = connection.cursor() db = router.db_for_write(self.cache_model_class)
cursor.execute("SELECT COUNT(*) FROM %s" % self._table) table = connections[db].ops.quote_name(self._table)
cursor = connections[db].cursor()
cursor.execute("SELECT COUNT(*) FROM %s" % table)
num = cursor.fetchone()[0] num = cursor.fetchone()[0]
now = datetime.now().replace(microsecond=0) now = datetime.now().replace(microsecond=0)
exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0) exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
if num > self._max_entries: if num > self._max_entries:
self._cull(cursor, now) self._cull(db, cursor, now)
encoded = base64.encodestring(pickle.dumps(value, 2)).strip() encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % self._table, [key]) cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
try: try:
result = cursor.fetchone() result = cursor.fetchone()
if result and (mode == 'set' or if result and (mode == 'set' or
(mode == 'add' and result[1] < now)): (mode == 'add' and result[1] < now)):
cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table, cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
[encoded, connection.ops.value_to_db_datetime(exp), key]) [encoded, connections[db].ops.value_to_db_datetime(exp), key])
else: else:
cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table, cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
[key, encoded, connection.ops.value_to_db_datetime(exp)]) [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
except DatabaseError: except DatabaseError:
# To be threadsafe, updates/inserts are allowed to fail silently # To be threadsafe, updates/inserts are allowed to fail silently
transaction.rollback_unless_managed() transaction.rollback_unless_managed(using=db)
return False return False
else: else:
transaction.commit_unless_managed() transaction.commit_unless_managed(using=db)
return True return True
def delete(self, key): def delete(self, key):
cursor = connection.cursor() db = router.db_for_write(self.cache_model_class)
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key]) table = connections[db].ops.quote_name(self._table)
transaction.commit_unless_managed() cursor = connections[db].cursor()
cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
transaction.commit_unless_managed(using=db)
def has_key(self, key): def has_key(self, key):
db = router.db_for_read(self.cache_model_class)
table = connections[db].ops.quote_name(self._table)
cursor = connections[db].cursor()
now = datetime.now().replace(microsecond=0) now = datetime.now().replace(microsecond=0)
cursor = connection.cursor() cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table, [key, connections[db].ops.value_to_db_datetime(now)])
[key, connection.ops.value_to_db_datetime(now)])
return cursor.fetchone() is not None return cursor.fetchone() is not None
def _cull(self, cursor, now): def _cull(self, db, cursor, now):
if self._cull_frequency == 0: if self._cull_frequency == 0:
self.clear() self.clear()
else: else:
cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table, cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
[connection.ops.value_to_db_datetime(now)]) [connections[db].ops.value_to_db_datetime(now)])
cursor.execute("SELECT COUNT(*) FROM %s" % self._table) cursor.execute("SELECT COUNT(*) FROM %s" % table)
num = cursor.fetchone()[0] num = cursor.fetchone()[0]
if num > self._max_entries: if num > self._max_entries:
cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency]) cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [num / self._cull_frequency])
cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]]) cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
def clear(self): def clear(self):
cursor = connection.cursor() db = router.db_for_write(self.cache_model_class)
cursor.execute('DELETE FROM %s' % self._table) table = connections[db].ops.quote_name(self._table)
cursor = connections[db].cursor()
cursor.execute('DELETE FROM %s' % table)

View File

@ -20,7 +20,8 @@ class Command(BaseCommand):
make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False, make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
help='Use natural keys if they are available.'), help='Use natural keys if they are available.'),
) )
help = 'Output the contents of the database as a fixture of the given format.' help = ("Output the contents of the database as a fixture of the given "
"format (using each model's default manager).")
args = '[appname appname.ModelName ...]' args = '[appname appname.ModelName ...]'
def handle(self, *app_labels, **options): def handle(self, *app_labels, **options):

View File

@ -66,12 +66,12 @@ The full error: %s""" % (connection.settings_dict['NAME'], e))
# Emit the post sync signal. This allows individual # Emit the post sync signal. This allows individual
# applications to respond as if the database had been # applications to respond as if the database had been
# sync'd from scratch. # sync'd from scratch.
all_models = [ all_models = []
(app.__name__.split('.')[-2], for app in models.get_apps():
[m for m in models.get_models(app, include_auto_created=True) all_models.extend([
if router.allow_syncdb(db, m)]) m for m in models.get_models(app, include_auto_created=True)
for app in models.get_apps() if router.allow_syncdb(db, m)
] ])
emit_post_sync_signal(all_models, verbosity, interactive, db) emit_post_sync_signal(all_models, verbosity, interactive, db)
# Reinstall the initial_data fixture. # Reinstall the initial_data fixture.

View File

@ -353,9 +353,11 @@ class BaseDatabaseCreation(object):
call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias) call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)
if settings.CACHE_BACKEND.startswith('db://'): if settings.CACHE_BACKEND.startswith('db://'):
from django.core.cache import parse_backend_uri from django.core.cache import parse_backend_uri, cache
_, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND) from django.db import router
call_command('createcachetable', cache_name) if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
_, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
call_command('createcachetable', cache_name, database=self.connection.alias)
# Get a cursor (even though we don't need one yet). This has # Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database. # the side effect of initializing the test database.

View File

@ -795,6 +795,14 @@ class EmailField(CharField):
kwargs['max_length'] = kwargs.get('max_length', 75) kwargs['max_length'] = kwargs.get('max_length', 75)
CharField.__init__(self, *args, **kwargs) CharField.__init__(self, *args, **kwargs)
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed twice
defaults = {
'form_class': forms.EmailField,
}
defaults.update(kwargs)
return super(EmailField, self).formfield(**defaults)
class FilePathField(Field): class FilePathField(Field):
description = _("File path") description = _("File path")
@ -1105,6 +1113,14 @@ class URLField(CharField):
CharField.__init__(self, verbose_name, name, **kwargs) CharField.__init__(self, verbose_name, name, **kwargs)
self.validators.append(validators.URLValidator(verify_exists=verify_exists)) self.validators.append(validators.URLValidator(verify_exists=verify_exists))
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed twice
defaults = {
'form_class': forms.URLField,
}
defaults.update(kwargs)
return super(URLField, self).formfield(**defaults)
class XMLField(TextField): class XMLField(TextField):
description = _("XML text") description = _("XML text")

View File

@ -136,6 +136,49 @@ settings file. You can't use a different database backend for your cache table.
Database caching works best if you've got a fast, well-indexed database server.
Database caching and multiple databases
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you use database caching with multiple databases, you'll also need
to set up routing instructions for your database cache table. For the
purposes of routing, the database cache table appears as a model named
``CacheEntry``, in an application named ``django_cache``. This model
won't appear in the models cache, but the model details can be used
for routing purposes.
For example, the following router would direct all cache read
operations to ``cache_slave``, and all write operations to
``cache_master``. The cache table will only be synchronized onto
``cache_master``::
class CacheRouter(object):
"""A router to control all database cache operations"""
def db_for_read(self, model, **hints):
"All cache read operations go to the slave"
if model._meta.app_label in ('django_cache',):
return 'cache_slave'
return None
def db_for_write(self, model, **hints):
"All cache write operations go to master"
if model._meta.app_label in ('django_cache',):
return 'cache_master'
return None
def allow_syncdb(self, db, model):
"Only synchronize the cache model on master"
if model._meta.app_label in ('django_cache',):
return db == 'cache_master'
return None
If you don't specify routing directions for the database cache model,
the cache backend will use the ``default`` database.
Of course, if you don't use the database cache backend, you don't need
to worry about providing routing instructions for the database cache
model.
Filesystem caching
------------------

View File

@ -170,7 +170,8 @@ and ``Person.people.all()``, yielding predictable results.
If you use custom ``Manager`` objects, take note that the first ``Manager``
Django encounters (in the order in which they're defined in the model) has a
special status. Django interprets the first ``Manager`` defined in a class as
the "default" ``Manager``, and several parts of Django
(including :djadmin:`dumpdata`) will use that ``Manager``
exclusively for that model. As a result, it's a good idea to be careful in
your choice of default manager in order to avoid a situation where overriding
``get_query_set()`` results in an inability to retrieve objects you'd like to

View File

@ -54,3 +54,6 @@ class Author(models.Model):
class Author1(models.Model): class Author1(models.Model):
publication = models.OneToOneField(Publication, null=False) publication = models.OneToOneField(Publication, null=False)
full_name = models.CharField(max_length=255) full_name = models.CharField(max_length=255)
class Homepage(models.Model):
url = models.URLField(verify_exists=False)

View File

@ -6,7 +6,8 @@ from django.forms.models import modelform_factory, ModelChoiceField
from django.conf import settings from django.conf import settings
from django.test import TestCase from django.test import TestCase
from models import Person, RealPerson, Triple, FilePathModel, Article, Publication, CustomFF, Author, Author1 from models import Person, RealPerson, Triple, FilePathModel, Article, \
Publication, CustomFF, Author, Author1, Homepage
class ModelMultipleChoiceFieldTests(TestCase): class ModelMultipleChoiceFieldTests(TestCase):
@ -216,3 +217,36 @@ class TestTicket11183(TestCase):
# without affecting other forms, the following must hold: # without affecting other forms, the following must hold:
self.assert_(field1 is not ModelChoiceForm.base_fields['person']) self.assert_(field1 is not ModelChoiceForm.base_fields['person'])
self.assert_(field1.widget.choices.field is field1) self.assert_(field1.widget.choices.field is field1)
class HomepageForm(forms.ModelForm):
class Meta:
model = Homepage
class URLFieldTests(TestCase):
def test_url_on_modelform(self):
"Check basic URL field validation on model forms"
self.assertFalse(HomepageForm({'url': 'foo'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://example.'}).is_valid())
self.assertFalse(HomepageForm({'url': 'http://com.'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://localhost'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000/test'}).is_valid())
self.assertTrue(HomepageForm({'url': 'http://example.com/foo/bar'}).is_valid())
def test_http_prefixing(self):
"If the http:// prefix is omitted on form input, the field adds it again. (Refs #13613)"
form = HomepageForm({'url': 'example.com'})
form.is_valid()
# self.assertTrue(form.is_valid())
# self.assertEquals(form.cleaned_data['url'], 'http://example.com/')
form = HomepageForm({'url': 'example.com/test'})
form.is_valid()
# self.assertTrue(form.is_valid())
# self.assertEquals(form.cleaned_data['url'], 'http://example.com/test')