diff --git a/django/core/cache/backends/db.py b/django/core/cache/backends/db.py
index 3398e6a85b..1300dd4b66 100644
--- a/django/core/cache/backends/db.py
+++ b/django/core/cache/backends/db.py
@@ -1,7 +1,7 @@
 "Database cache backend."
 
 from django.core.cache.backends.base import BaseCache
-from django.db import connection, transaction, DatabaseError
+from django.db import connections, router, transaction, DatabaseError
 import base64, time
 from datetime import datetime
 try:
@@ -9,10 +9,31 @@ try:
 except ImportError:
     import pickle
 
+class Options(object):
+    """A class that will quack like a Django model _meta class.
+
+    This allows cache operations to be controlled by the router
+    """
+    def __init__(self, table):
+        self.db_table = table
+        self.app_label = 'django_cache'
+        self.module_name = 'cacheentry'
+        self.verbose_name = 'cache entry'
+        self.verbose_name_plural = 'cache entries'
+        self.object_name = 'CacheEntry'
+        self.abstract = False
+        self.managed = True
+        self.proxy = False
+
 class CacheClass(BaseCache):
     def __init__(self, table, params):
         BaseCache.__init__(self, params)
-        self._table = connection.ops.quote_name(table)
+        self._table = table
+
+        class CacheEntry(object):
+            _meta = Options(table)
+        self.cache_model_class = CacheEntry
+
 
         max_entries = params.get('max_entries', 300)
         try:
@@ -25,17 +46,22 @@ class CacheClass(BaseCache):
             self._cull_frequency = 3
 
     def get(self, key, default=None):
-        cursor = connection.cursor()
-        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+        db = router.db_for_read(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
         row = cursor.fetchone()
         if row is None:
             return default
         now = datetime.now()
         if row[2] < now:
-            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
-            transaction.commit_unless_managed()
+            db = router.db_for_write(self.cache_model_class)
+            cursor = connections[db].cursor()
+            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+            transaction.commit_unless_managed(using=db)
             return default
-        value = connection.ops.process_clob(row[1])
+        value = connections[db].ops.process_clob(row[1])
         return pickle.loads(base64.decodestring(value))
 
     def set(self, key, value, timeout=None):
@@ -47,56 +73,68 @@ class CacheClass(BaseCache):
     def _base_set(self, mode, key, value, timeout=None):
         if timeout is None:
             timeout = self.default_timeout
-        cursor = connection.cursor()
-        cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("SELECT COUNT(*) FROM %s" % table)
         num = cursor.fetchone()[0]
         now = datetime.now().replace(microsecond=0)
         exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
         if num > self._max_entries:
-            self._cull(cursor, now)
+            self._cull(db, cursor, now)
         encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
-        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
         try:
             result = cursor.fetchone()
             if result and (mode == 'set' or (mode == 'add' and result[1] < now)):
-                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table,
-                    [encoded, connection.ops.value_to_db_datetime(exp), key])
+                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
+                    [encoded, connections[db].ops.value_to_db_datetime(exp), key])
             else:
-                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table,
-                    [key, encoded, connection.ops.value_to_db_datetime(exp)])
+                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
+                    [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
         except DatabaseError:
             # To be threadsafe, updates/inserts are allowed to fail silently
-            transaction.rollback_unless_managed()
+            transaction.rollback_unless_managed(using=db)
             return False
         else:
-            transaction.commit_unless_managed()
+            transaction.commit_unless_managed(using=db)
             return True
 
     def delete(self, key):
-        cursor = connection.cursor()
-        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
-        transaction.commit_unless_managed()
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+        transaction.commit_unless_managed(using=db)
 
     def has_key(self, key):
+        db = router.db_for_read(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
         now = datetime.now().replace(microsecond=0)
-        cursor = connection.cursor()
-        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table,
-            [key, connection.ops.value_to_db_datetime(now)])
+        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
+            [key, connections[db].ops.value_to_db_datetime(now)])
         return cursor.fetchone() is not None
 
-    def _cull(self, cursor, now):
+    def _cull(self, db, cursor, now):
         if self._cull_frequency == 0:
             self.clear()
         else:
-            cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table,
-                [connection.ops.value_to_db_datetime(now)])
-            cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+            table = connections[db].ops.quote_name(self._table)
+            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
+                [connections[db].ops.value_to_db_datetime(now)])
+            cursor.execute("SELECT COUNT(*) FROM %s" % table)
             num = cursor.fetchone()[0]
             if num > self._max_entries:
-                cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
-                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
+                cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [num / self._cull_frequency])
+                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
 
     def clear(self):
-        cursor = connection.cursor()
-        cursor.execute('DELETE FROM %s' % self._table)
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+        cursor.execute('DELETE FROM %s' % table)
 
diff --git a/django/core/management/commands/dumpdata.py b/django/core/management/commands/dumpdata.py
index aaaa5845a5..23c03e7b17 100644
--- a/django/core/management/commands/dumpdata.py
+++ b/django/core/management/commands/dumpdata.py
@@ -20,7 +20,8 @@ class Command(BaseCommand):
         make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
             help='Use natural keys if they are available.'),
     )
-    help = 'Output the contents of the database as a fixture of the given format.'
+    help = ("Output the contents of the database as a fixture of the given "
+            "format (using each model's default manager).")
     args = '[appname appname.ModelName ...]'
 
     def handle(self, *app_labels, **options):
@@ -163,4 +164,4 @@ def sort_dependencies(app_list):
             )
         model_dependencies = skipped
 
-    return model_list
\ No newline at end of file
+    return model_list
diff --git a/django/core/management/commands/flush.py b/django/core/management/commands/flush.py
index 98e3f3151b..f7e769bdac 100644
--- a/django/core/management/commands/flush.py
+++ b/django/core/management/commands/flush.py
@@ -66,12 +66,12 @@ The full error: %s""" % (connection.settings_dict['NAME'], e))
             # Emit the post sync signal. This allows individual
             # applications to respond as if the database had been
             # sync'd from scratch.
-            all_models = [
-                (app.__name__.split('.')[-2],
-                    [m for m in models.get_models(app, include_auto_created=True)
-                    if router.allow_syncdb(db, m)])
-                for app in models.get_apps()
-            ]
+            all_models = []
+            for app in models.get_apps():
+                all_models.extend([
+                    m for m in models.get_models(app, include_auto_created=True)
+                    if router.allow_syncdb(db, m)
+                ])
             emit_post_sync_signal(all_models, verbosity, interactive, db)
 
             # Reinstall the initial_data fixture.
diff --git a/django/db/backends/creation.py b/django/db/backends/creation.py
index 492ac1e3e9..be9b6fc91d 100644
--- a/django/db/backends/creation.py
+++ b/django/db/backends/creation.py
@@ -353,9 +353,11 @@ class BaseDatabaseCreation(object):
         call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)
 
         if settings.CACHE_BACKEND.startswith('db://'):
-            from django.core.cache import parse_backend_uri
-            _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
-            call_command('createcachetable', cache_name)
+            from django.core.cache import parse_backend_uri, cache
+            from django.db import router
+            if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
+                _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
+                call_command('createcachetable', cache_name, database=self.connection.alias)
 
         # Get a cursor (even though we don't need one yet). This has
         # the side effect of initializing the test database.
diff --git a/django/db/models/fields/__init__.py b/django/db/models/fields/__init__.py
index 65b60a0173..4640eb3238 100644
--- a/django/db/models/fields/__init__.py
+++ b/django/db/models/fields/__init__.py
@@ -795,6 +795,14 @@ class EmailField(CharField):
         kwargs['max_length'] = kwargs.get('max_length', 75)
         CharField.__init__(self, *args, **kwargs)
 
+    def formfield(self, **kwargs):
+        # As with CharField, this will cause email validation to be performed twice
+        defaults = {
+            'form_class': forms.EmailField,
+        }
+        defaults.update(kwargs)
+        return super(EmailField, self).formfield(**defaults)
+
 class FilePathField(Field):
     description = _("File path")
 
@@ -1105,6 +1113,14 @@ class URLField(CharField):
         CharField.__init__(self, verbose_name, name, **kwargs)
         self.validators.append(validators.URLValidator(verify_exists=verify_exists))
 
+    def formfield(self, **kwargs):
+        # As with CharField, this will cause URL validation to be performed twice
+        defaults = {
+            'form_class': forms.URLField,
+        }
+        defaults.update(kwargs)
+        return super(URLField, self).formfield(**defaults)
+
 class XMLField(TextField):
     description = _("XML text")
 
diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt
index 9dedbcf3b9..5e263aa543 100644
--- a/docs/topics/cache.txt
+++ b/docs/topics/cache.txt
@@ -136,6 +136,49 @@ settings file. You can't use a different database backend for your cache table.
 
 Database caching works best if you've got a fast, well-indexed database server.
 
+Database caching and multiple databases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you use database caching with multiple databases, you'll also need
+to set up routing instructions for your database cache table. For the
+purposes of routing, the database cache table appears as a model named
+``CacheEntry``, in an application named ``django_cache``. This model
+won't appear in the models cache, but the model details can be used
+for routing purposes.
+
+For example, the following router would direct all cache read
+operations to ``cache_slave``, and all write operations to
+``cache_master``. The cache table will only be synchronized onto
+``cache_master``::
+
+    class CacheRouter(object):
+        """A router to control all database cache operations"""
+
+        def db_for_read(self, model, **hints):
+            "All cache read operations go to the slave"
+            if model._meta.app_label in ('django_cache',):
+                return 'cache_slave'
+            return None
+
+        def db_for_write(self, model, **hints):
+            "All cache write operations go to master"
+            if model._meta.app_label in ('django_cache',):
+                return 'cache_master'
+            return None
+
+        def allow_syncdb(self, db, model):
+            "Only synchronize the cache model on master"
+            if model._meta.app_label in ('django_cache',):
+                return db == 'cache_master'
+            return None
+
+If you don't specify routing directions for the database cache model,
+the cache backend will use the ``default`` database.
+
+Of course, if you don't use the database cache backend, you don't need
+to worry about providing routing instructions for the database cache
+model.
+
 Filesystem caching
 ------------------
 
diff --git a/docs/topics/db/managers.txt b/docs/topics/db/managers.txt
index 123408b2bb..aa47e5dd15 100644
--- a/docs/topics/db/managers.txt
+++ b/docs/topics/db/managers.txt
@@ -170,7 +170,8 @@ and ``Person.people.all()``, yielding predictable results.
 If you use custom ``Manager`` objects, take note that the first ``Manager``
 Django encounters (in the order in which they're defined in the model) has a
 special status. Django interprets the first ``Manager`` defined in a class as
-the "default" ``Manager``, and several parts of Django will use that ``Manager``
+the "default" ``Manager``, and several parts of Django
+(including :djadmin:`dumpdata`) will use that ``Manager``
 exclusively for that model. As a result, it's a good idea to be careful in
 your choice of default manager in order to avoid a situation where overriding
 ``get_query_set()`` results in an inability to retrieve objects you'd like to
diff --git a/tests/regressiontests/model_forms_regress/models.py b/tests/regressiontests/model_forms_regress/models.py
index 871bb6f815..4f9811a963 100644
--- a/tests/regressiontests/model_forms_regress/models.py
+++ b/tests/regressiontests/model_forms_regress/models.py
@@ -54,3 +54,6 @@ class Author(models.Model):
 class Author1(models.Model):
     publication = models.OneToOneField(Publication, null=False)
     full_name = models.CharField(max_length=255)
+
+class Homepage(models.Model):
+    url = models.URLField(verify_exists=False)
diff --git a/tests/regressiontests/model_forms_regress/tests.py b/tests/regressiontests/model_forms_regress/tests.py
index 5a7a83bc0e..2e77a88977 100644
--- a/tests/regressiontests/model_forms_regress/tests.py
+++ b/tests/regressiontests/model_forms_regress/tests.py
@@ -6,7 +6,8 @@ from django.forms.models import modelform_factory, ModelChoiceField
 from django.conf import settings
 from django.test import TestCase
 
-from models import Person, RealPerson, Triple, FilePathModel, Article, Publication, CustomFF, Author, Author1
+from models import Person, RealPerson, Triple, FilePathModel, Article, \
+    Publication, CustomFF, Author, Author1, Homepage
 
 class ModelMultipleChoiceFieldTests(TestCase):
 
@@ -212,7 +213,40 @@ class TestTicket11183(TestCase):
     def test_11183(self):
         form1 = ModelChoiceForm()
         field1 = form1.fields['person']
-        # To allow the widget to change the queryset of field1.widget.choices correctly, 
+        # To allow the widget to change the queryset of field1.widget.choices correctly,
         # without affecting other forms, the following must hold:
         self.assert_(field1 is not ModelChoiceForm.base_fields['person'])
         self.assert_(field1.widget.choices.field is field1)
+
+class HomepageForm(forms.ModelForm):
+    class Meta:
+        model = Homepage
+
+class URLFieldTests(TestCase):
+    def test_url_on_modelform(self):
+        "Check basic URL field validation on model forms"
+        self.assertFalse(HomepageForm({'url': 'foo'}).is_valid())
+        self.assertFalse(HomepageForm({'url': 'http://'}).is_valid())
+        self.assertFalse(HomepageForm({'url': 'http://example'}).is_valid())
+        self.assertFalse(HomepageForm({'url': 'http://example.'}).is_valid())
+        self.assertFalse(HomepageForm({'url': 'http://com.'}).is_valid())
+
+        self.assertTrue(HomepageForm({'url': 'http://localhost'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://example.com'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://www.example.com'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://www.example.com/test'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://www.example.com:8000/test'}).is_valid())
+        self.assertTrue(HomepageForm({'url': 'http://example.com/foo/bar'}).is_valid())
+
+    def test_http_prefixing(self):
+        "If the http:// prefix is omitted on form input, the field adds it again. (Refs #13613)"
+        form = HomepageForm({'url': 'example.com'})
+        form.is_valid()
+        # self.assertTrue(form.is_valid())
+        # self.assertEquals(form.cleaned_data['url'], 'http://example.com/')
+
+        form = HomepageForm({'url': 'example.com/test'})
+        form.is_valid()
+        # self.assertTrue(form.is_valid())
+        # self.assertEquals(form.cleaned_data['url'], 'http://example.com/test')
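
Reviewer note (not part of the patch): a minimal sketch of how the cache routing added above would be wired into a project. The database aliases ('cache_master', 'cache_slave'), file names, and the router module path are hypothetical, borrowed from the documentation example in the docs/topics/cache.txt hunk; DATABASES, CACHE_BACKEND and DATABASE_ROUTERS are the standard settings of the Django version this patch targets.

    # settings.py (excerpt) -- assumes the CacheRouter from the docs lives in myproject/routers.py
    DATABASES = {
        'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'app.db'},
        'cache_master': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'cache_master.db'},
        'cache_slave': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'cache_slave.db'},
    }
    CACHE_BACKEND = 'db://my_cache_table'
    DATABASE_ROUTERS = ['myproject.routers.CacheRouter']

    # The cache table is created only where allow_syncdb() returns True, e.g.:
    #   ./manage.py createcachetable my_cache_table --database=cache_master
    #
    # With the router installed, the backend resolves a connection per operation:
    # set()/add()/delete() go through router.db_for_write (-> 'cache_master'),
    # while get()/has_key() go through router.db_for_read (-> 'cache_slave').
    from django.core.cache import cache
    cache.set('greeting', 'hello', 30)   # written via the 'cache_master' alias
    cache.get('greeting')                # read via the 'cache_slave' alias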