
Fixed #13946 -- Modified the database cache backend to use the database router to determine availability of the cache table. Thanks to tiemonster for the report.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@13473 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Author: Russell Keith-Magee
Date: 2010-08-05 02:13:32 +00:00
Parent: 1c8547bf98
Commit: fc374976e5
3 changed files with 116 additions and 34 deletions

django/core/cache/backends/db.py

@@ -1,7 +1,7 @@
 "Database cache backend."
 
 from django.core.cache.backends.base import BaseCache
-from django.db import connection, transaction, DatabaseError
+from django.db import connections, router, transaction, DatabaseError
 import base64, time
 from datetime import datetime
 try:
@@ -9,10 +9,31 @@ try:
 except ImportError:
     import pickle
 
+class Options(object):
+    """A class that will quack like a Django model _meta class.
+
+    This allows cache operations to be controlled by the router
+    """
+    def __init__(self, table):
+        self.db_table = table
+        self.app_label = 'django_cache'
+        self.module_name = 'cacheentry'
+        self.verbose_name = 'cache entry'
+        self.verbose_name_plural = 'cache entries'
+        self.object_name = 'CacheEntry'
+        self.abstract = False
+        self.managed = True
+        self.proxy = False
+
 class CacheClass(BaseCache):
     def __init__(self, table, params):
         BaseCache.__init__(self, params)
-        self._table = connection.ops.quote_name(table)
+        self._table = table
+
+        class CacheEntry(object):
+            _meta = Options(table)
+        self.cache_model_class = CacheEntry
+
         max_entries = params.get('max_entries', 300)
         try:
             self._max_entries = int(max_entries)
@@ -25,17 +46,22 @@ class CacheClass(BaseCache):
             self._cull_frequency = 3
 
     def get(self, key, default=None):
-        cursor = connection.cursor()
-        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+        db = router.db_for_read(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("SELECT cache_key, value, expires FROM %s WHERE cache_key = %%s" % table, [key])
         row = cursor.fetchone()
         if row is None:
             return default
         now = datetime.now()
         if row[2] < now:
-            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
-            transaction.commit_unless_managed()
+            db = router.db_for_write(self.cache_model_class)
+            cursor = connections[db].cursor()
+            cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+            transaction.commit_unless_managed(using=db)
             return default
-        value = connection.ops.process_clob(row[1])
+        value = connections[db].ops.process_clob(row[1])
         return pickle.loads(base64.decodestring(value))
 
     def set(self, key, value, timeout=None):
@@ -47,56 +73,67 @@ class CacheClass(BaseCache):
     def _base_set(self, mode, key, value, timeout=None):
         if timeout is None:
             timeout = self.default_timeout
-        cursor = connection.cursor()
-        cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("SELECT COUNT(*) FROM %s" % table)
         num = cursor.fetchone()[0]
         now = datetime.now().replace(microsecond=0)
         exp = datetime.fromtimestamp(time.time() + timeout).replace(microsecond=0)
         if num > self._max_entries:
-            self._cull(cursor, now)
+            self._cull(db, cursor, now)
         encoded = base64.encodestring(pickle.dumps(value, 2)).strip()
-        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % self._table, [key])
+        cursor.execute("SELECT cache_key, expires FROM %s WHERE cache_key = %%s" % table, [key])
         try:
             result = cursor.fetchone()
             if result and (mode == 'set' or
                     (mode == 'add' and result[1] < now)):
-                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % self._table,
-                               [encoded, connection.ops.value_to_db_datetime(exp), key])
+                cursor.execute("UPDATE %s SET value = %%s, expires = %%s WHERE cache_key = %%s" % table,
+                               [encoded, connections[db].ops.value_to_db_datetime(exp), key])
             else:
-                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % self._table,
-                               [key, encoded, connection.ops.value_to_db_datetime(exp)])
+                cursor.execute("INSERT INTO %s (cache_key, value, expires) VALUES (%%s, %%s, %%s)" % table,
+                               [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
         except DatabaseError:
             # To be threadsafe, updates/inserts are allowed to fail silently
-            transaction.rollback_unless_managed()
+            transaction.rollback_unless_managed(using=db)
             return False
         else:
-            transaction.commit_unless_managed()
+            transaction.commit_unless_managed(using=db)
             return True
 
     def delete(self, key):
-        cursor = connection.cursor()
-        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % self._table, [key])
-        transaction.commit_unless_managed()
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+
+        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
+        transaction.commit_unless_managed(using=db)
 
     def has_key(self, key):
+        db = router.db_for_read(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
         now = datetime.now().replace(microsecond=0)
-        cursor = connection.cursor()
-        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % self._table,
-                       [key, connection.ops.value_to_db_datetime(now)])
+        cursor.execute("SELECT cache_key FROM %s WHERE cache_key = %%s and expires > %%s" % table,
+                       [key, connections[db].ops.value_to_db_datetime(now)])
         return cursor.fetchone() is not None
 
-    def _cull(self, cursor, now):
+    def _cull(self, db, cursor, now):
         if self._cull_frequency == 0:
             self.clear()
         else:
-            cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table,
-                           [connection.ops.value_to_db_datetime(now)])
-            cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
+            table = connections[db].ops.quote_name(self._table)
+            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
+                           [connections[db].ops.value_to_db_datetime(now)])
+            cursor.execute("SELECT COUNT(*) FROM %s" % table)
             num = cursor.fetchone()[0]
             if num > self._max_entries:
-                cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
-                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
+                cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % table, [num / self._cull_frequency])
+                cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % table, [cursor.fetchone()[0]])
 
     def clear(self):
-        cursor = connection.cursor()
-        cursor.execute('DELETE FROM %s' % self._table)
+        db = router.db_for_write(self.cache_model_class)
+        table = connections[db].ops.quote_name(self._table)
+        cursor = connections[db].cursor()
+        cursor.execute('DELETE FROM %s' % table)
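
For context, the stand-in CacheEntry model above exists so that a project's database router can recognise cache operations by their app_label ('django_cache') and steer them to a particular database alias. What follows is a minimal sketch of such a router, not part of this commit; the CacheRouter name and the 'cache_db' alias are illustrative assumptions, while db_for_read, db_for_write and allow_syncdb are the router methods of this Django era.

class CacheRouter(object):
    """Route cache-table operations to a dedicated 'cache_db' alias (illustrative)."""

    def db_for_read(self, model, **hints):
        # The backend's stand-in CacheEntry model carries app_label 'django_cache',
        # so cache reads can be matched here.
        if model._meta.app_label == 'django_cache':
            return 'cache_db'
        return None

    def db_for_write(self, model, **hints):
        # Cache writes (set/delete/cull/clear) go to the same alias.
        if model._meta.app_label == 'django_cache':
            return 'cache_db'
        return None

    def allow_syncdb(self, db, model):
        # Only allow createcachetable/syncdb to create the cache table on 'cache_db'.
        if model._meta.app_label == 'django_cache':
            return db == 'cache_db'
        return None

Such a router would be activated via DATABASE_ROUTERS = ['path.to.CacheRouter'] in settings; the dotted path here is project-specific.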