
boulder-oracle-sprint: Refactored backend query.py modules away to reduce some extra diffs with the trunk.


git-svn-id: http://code.djangoproject.com/svn/django/branches/boulder-oracle-sprint@4728 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Boulder Sprinters 2007-03-14 21:34:23 +00:00
parent 1c6eeb2dda
commit 871f497e67
16 changed files with 420 additions and 260 deletions

View File

@ -23,20 +23,12 @@ except ImportError, e:
else:
raise # If there's some other error, this must be an error in Django itself.
def backend_module_accessor(module_name):
def accessor():
full_name = 'django.db.backends.%s.%s' % (settings.DATABASE_ENGINE, module_name)
return __import__(full_name, {}, {}, [''])
return accessor
get_introspection_module = backend_module_accessor("introspection")
get_creation_module = backend_module_accessor("creation")
get_query_module = backend_module_accessor("query")
get_client_module = backend_module_accessor("client")
runshell = lambda: get_client_module().runshell()
get_introspection_module = lambda: __import__('django.db.backends.%s.introspection' % settings.DATABASE_ENGINE, {}, {}, [''])
get_creation_module = lambda: __import__('django.db.backends.%s.creation' % settings.DATABASE_ENGINE, {}, {}, [''])
get_query_module = lambda: __import__('django.db.backends.%s.query' % settings.DATABASE_ENGINE, {}, {}, [''])
runshell = lambda: __import__('django.db.backends.%s.client' % settings.DATABASE_ENGINE, {}, {}, ['']).runshell()
connection = backend.DatabaseWrapper(**settings.DATABASE_OPTIONS)
DatabaseError = backend.DatabaseError
# Register an event that closes the database connection
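
Editorial aside (not part of this commit): each of these accessors lazily imports the engine-specific submodule named by settings.DATABASE_ENGINE, so backend helpers can be reached without hard-coding the engine. A minimal sketch, assuming a configured settings module and that the backend's introspection module exposes get_table_list(), as the built-in backends of this era do:

    from django.db import connection, get_introspection_module

    cursor = connection.cursor()
    # List the tables the configured backend can see.
    tables = get_introspection_module().get_table_list(cursor)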

View File

@ -10,6 +10,9 @@ try:
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured, "Error loading MySQLdb module: %s" % e
if Database.version_info < (1,2,1,'final',2):
raise ImportError, "MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__
from MySQLdb.converters import conversions
from MySQLdb.constants import FIELD_TYPE
import types
@ -17,11 +20,14 @@ import re
DatabaseError = Database.DatabaseError
# MySQLdb-1.2.1 supports the Python boolean type, and only uses datetime
# module for time-related columns; older versions could have used mx.DateTime
# or strings if there were no datetime module. However, MySQLdb still returns
# TIME columns as timedelta -- they are more like timedelta in terms of actual
# behavior as they are signed and include days -- and Django expects time, so
# we still need to override that.
django_conversions = conversions.copy()
django_conversions.update({
types.BooleanType: util.rev_typecast_boolean,
FIELD_TYPE.DATETIME: util.typecast_timestamp,
FIELD_TYPE.DATE: util.typecast_date,
FIELD_TYPE.TIME: util.typecast_time,
})
@ -31,31 +37,12 @@ django_conversions.update({
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
# This is an extra debug layer over MySQL queries, to display warnings.
# It's only used when DEBUG=True.
class MysqlDebugWrapper:
def __init__(self, cursor):
self.cursor = cursor
def execute(self, sql, params=()):
try:
return self.cursor.execute(sql, params)
except Database.Warning, w:
self.cursor.execute("SHOW WARNINGS")
raise Database.Warning, "%s: %s" % (w, self.cursor.fetchall())
def executemany(self, sql, param_list):
try:
return self.cursor.executemany(sql, param_list)
except Database.Warning, w:
self.cursor.execute("SHOW WARNINGS")
raise Database.Warning, "%s: %s" % (w, self.cursor.fetchall())
def __getattr__(self, attr):
if self.__dict__.has_key(attr):
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is setup when the connection is created, and the
# standard util.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
try:
# Only exists in Python 2.4+
@ -83,28 +70,31 @@ class DatabaseWrapper(local):
def cursor(self):
from django.conf import settings
from warnings import filterwarnings
if not self._valid_connection():
kwargs = {
'user': settings.DATABASE_USER,
'db': settings.DATABASE_NAME,
'passwd': settings.DATABASE_PASSWORD,
'conv': django_conversions,
}
if settings.DATABASE_USER:
kwargs['user'] = settings.DATABASE_USER
if settings.DATABASE_NAME:
kwargs['db'] = settings.DATABASE_NAME
if settings.DATABASE_PASSWORD:
kwargs['passwd'] = settings.DATABASE_PASSWORD
if settings.DATABASE_HOST.startswith('/'):
kwargs['unix_socket'] = settings.DATABASE_HOST
else:
elif settings.DATABASE_HOST:
kwargs['host'] = settings.DATABASE_HOST
if settings.DATABASE_PORT:
kwargs['port'] = int(settings.DATABASE_PORT)
kwargs.update(self.options)
self.connection = Database.connect(**kwargs)
cursor = self.connection.cursor()
if self.connection.get_server_info() >= '4.1':
cursor.execute("SET NAMES 'utf8'")
else:
cursor = self.connection.cursor()
if settings.DEBUG:
return util.CursorDebugWrapper(MysqlDebugWrapper(cursor), self)
filterwarnings("error", category=Database.Warning)
return util.CursorDebugWrapper(cursor, self)
return cursor
def _commit(self):
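
Editorial aside (not part of this commit): the warnings-as-errors approach described in the comment above boils down to the standard library's warning filters plus MySQLdb's DB-API Warning class. A minimal standalone sketch, assuming MySQLdb is installed (connection values are placeholders):

    import warnings
    import MySQLdb

    # Promote MySQL warnings (e.g. data truncation) to exceptions -- the same
    # effect the backend gets from filterwarnings() when DEBUG is True.
    warnings.filterwarnings("error", category=MySQLdb.Warning)

    # Placeholder connection values.
    conn = MySQLdb.connect(db="example", user="someuser", passwd="secret")
    cursor = conn.cursor()
    # From here on, any statement that provokes a MySQL warning raises
    # MySQLdb.Warning instead of being silently recorded on the connection.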

View File

@ -3,12 +3,25 @@ import os
def runshell():
args = ['']
args += ["--user=%s" % settings.DATABASE_USER]
if settings.DATABASE_PASSWORD:
args += ["--password=%s" % settings.DATABASE_PASSWORD]
if settings.DATABASE_HOST:
args += ["--host=%s" % settings.DATABASE_HOST]
if settings.DATABASE_PORT:
args += ["--port=%s" % settings.DATABASE_PORT]
args += [settings.DATABASE_NAME]
db = settings.DATABASE_OPTIONS.get('db', settings.DATABASE_NAME)
user = settings.DATABASE_OPTIONS.get('user', settings.DATABASE_USER)
passwd = settings.DATABASE_OPTIONS.get('passwd', settings.DATABASE_PASSWORD)
host = settings.DATABASE_OPTIONS.get('host', settings.DATABASE_HOST)
port = settings.DATABASE_OPTIONS.get('port', settings.DATABASE_PORT)
defaults_file = settings.DATABASE_OPTIONS.get('read_default_file')
# Seems to be no good way to set sql_mode with CLI
if defaults_file:
args += ["--defaults-file=%s" % defaults_file]
if user:
args += ["--user=%s" % user]
if passwd:
args += ["--password=%s" % passwd]
if host:
args += ["--host=%s" % host]
if port:
args += ["--port=%s" % port]
if db:
args += [db]
os.execvp('mysql', args)

View File

@ -11,6 +11,9 @@ try:
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured, "Error loading cx_Oracle module: %s" % e
import datetime
from django.utils.datastructures import SortedDict
DatabaseError = Database.Error
@ -227,6 +230,201 @@ def get_drop_sequence(table):
drop_sequence_sql = 'DROP SEQUENCE %s;' % sq_name
return drop_sequence_sql
def get_query_set_class(DefaultQuerySet):
"Create a custom QuerySet class for Oracle."
from django.db import backend, connection
class OracleQuerySet(DefaultQuerySet):
def iterator(self):
"Performs the SELECT database lookup of this QuerySet."
from django.db.models.query import get_cached_row
# self._select is a dictionary, and dictionaries' key order is
# undefined, so we convert it to a list of tuples.
extra_select = self._select.items()
full_query = None
select, sql, params, full_query = self._get_sql_clause(get_full_query=True)
if not full_query:
full_query = "SELECT %s%s\n%s" % \
((self._distinct and "DISTINCT " or ""),
', '.join(select), sql)
cursor = connection.cursor()
cursor.execute(full_query, params)
fill_cache = self._select_related
index_end = len(self.model._meta.fields)
# so here's the logic;
# 1. retrieve each row in turn
# 2. convert NCLOBs
def resolve_cols(row):
for field in row:
if isinstance(field, Database.LOB):
yield str(field)
# cx_Oracle returns datetime.datetime objects for DATE
# columns, but Django wants a datetime.date.
# A workaround is to return a date if time fields are 0.
# A safer fix would involve either patching cx_Oracle,
# or checking the Model here, neither of which is good.
elif isinstance(field, datetime.datetime) and \
field.hour == field.minute == field.second == field.microsecond == 0:
yield field.date()
else:
yield field
for unresolved_row in cursor:
row = list(resolve_cols(unresolved_row))
if fill_cache:
obj, index_end = get_cached_row(self.model, row, 0)
else:
obj = self.model(*row[:index_end])
for i, k in enumerate(extra_select):
setattr(obj, k[0], row[index_end+i])
yield obj
def _get_sql_clause(self, get_full_query=False):
from django.db.models.query import fill_table_cache, \
handle_legacy_orderlist, orderfield2column
opts = self.model._meta
# Construct the fundamental parts of the query: SELECT X FROM Y WHERE Z.
select = ["%s.%s" % (backend.quote_name(opts.db_table), backend.quote_name(f.column)) for f in opts.fields]
tables = [quote_only_if_word(t) for t in self._tables]
joins = SortedDict()
where = self._where[:]
params = self._params[:]
# Convert self._filters into SQL.
joins2, where2, params2 = self._filters.get_sql(opts)
joins.update(joins2)
where.extend(where2)
params.extend(params2)
# Add additional tables and WHERE clauses based on select_related.
if self._select_related:
fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table])
# Add any additional SELECTs.
if self._select:
select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()])
# Start composing the body of the SQL statement.
sql = [" FROM", backend.quote_name(opts.db_table)]
# Compose the join dictionary into SQL describing the joins.
if joins:
sql.append(" ".join(["%s %s %s ON %s" % (join_type, table, alias, condition)
for (alias, (table, join_type, condition)) in joins.items()]))
# Compose the tables clause into SQL.
if tables:
sql.append(", " + ", ".join(tables))
# Compose the where clause into SQL.
if where:
sql.append(where and "WHERE " + " AND ".join(where))
# ORDER BY clause
order_by = []
if self._order_by is not None:
ordering_to_use = self._order_by
else:
ordering_to_use = opts.ordering
for f in handle_legacy_orderlist(ordering_to_use):
if f == '?': # Special case.
order_by.append(backend.get_random_function_sql())
else:
if f.startswith('-'):
col_name = f[1:]
order = "DESC"
else:
col_name = f
order = "ASC"
if "." in col_name:
table_prefix, col_name = col_name.split('.', 1)
table_prefix = backend.quote_name(table_prefix) + '.'
else:
# Use the database table as a column prefix if it wasn't given,
# and if the requested column isn't a custom SELECT.
if "." not in col_name and col_name not in (self._select or ()):
table_prefix = backend.quote_name(opts.db_table) + '.'
else:
table_prefix = ''
order_by.append('%s%s %s' % (table_prefix, backend.quote_name(orderfield2column(col_name, opts)), order))
if order_by:
sql.append("ORDER BY " + ", ".join(order_by))
# Look for column name collisions in the select elements
# and fix them with an AS alias. This allows us to do a
# SELECT * later in the paging query.
cols = [clause.split('.')[-1] for clause in select]
for index, col in enumerate(cols):
if cols.count(col) > 1:
col = '%s%d' % (col.replace('"', ''), index)
cols[index] = col
select[index] = '%s AS %s' % (select[index], col)
# LIMIT and OFFSET clauses
# To support limits and offsets, Oracle requires some funky rewriting of an otherwise normal looking query.
select_clause = ",".join(select)
distinct = (self._distinct and "DISTINCT " or "")
if order_by:
order_by_clause = " OVER (ORDER BY %s )" % (", ".join(order_by))
else:
#Oracle's row_number() function always requires an order-by clause.
#So we need to define a default order-by, since none was provided.
order_by_clause = " OVER (ORDER BY %s.%s)" % \
(backend.quote_name(opts.db_table),
backend.quote_name(opts.fields[0].db_column or opts.fields[0].column))
# limit_and_offset_clause
if self._limit is None:
assert self._offset is None, "'offset' is not allowed without 'limit'"
if self._offset is not None:
offset = int(self._offset)
else:
offset = 0
if self._limit is not None:
limit = int(self._limit)
else:
limit = None
limit_and_offset_clause = ''
if limit is not None:
limit_and_offset_clause = "WHERE rn > %s AND rn <= %s" % (offset, limit+offset)
elif offset:
limit_and_offset_clause = "WHERE rn > %s" % (offset)
if len(limit_and_offset_clause) > 0:
fmt = \
"""SELECT * FROM
(SELECT %s%s,
ROW_NUMBER()%s AS rn
%s)
%s"""
full_query = fmt % (distinct, select_clause,
order_by_clause, ' '.join(sql).strip(),
limit_and_offset_clause)
else:
full_query = None
if get_full_query:
return select, " ".join(sql), params, full_query
else:
return select, " ".join(sql), params
return OracleQuerySet
OPERATOR_MAPPING = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",

View File

@ -1,194 +0,0 @@
import datetime
from django.db import backend, connection
from django.utils.datastructures import SortedDict
import cx_Oracle as Database
def get_query_set_class(DefaultQuerySet):
"Create a custom QuerySet class for Oracle."
class OracleQuerySet(DefaultQuerySet):
def iterator(self):
"Performs the SELECT database lookup of this QuerySet."
from django.db.models.query import get_cached_row
# self._select is a dictionary, and dictionaries' key order is
# undefined, so we convert it to a list of tuples.
extra_select = self._select.items()
full_query = None
select, sql, params, full_query = self._get_sql_clause()
if not full_query:
full_query = "SELECT %s%s\n%s" % \
((self._distinct and "DISTINCT " or ""),
', '.join(select), sql)
cursor = connection.cursor()
cursor.execute(full_query, params)
fill_cache = self._select_related
index_end = len(self.model._meta.fields)
# so here's the logic;
# 1. retrieve each row in turn
# 2. convert NCLOBs
def resolve_cols(row):
for field in row:
if isinstance(field, Database.LOB):
yield str(field)
# cx_Oracle returns datetime.datetime objects for DATE
# columns, but Django wants a datetime.date.
# A workaround is to return a date if time fields are 0.
# A safer fix would involve either patching cx_Oracle,
# or checking the Model here, neither of which is good.
elif isinstance(field, datetime.datetime) and \
field.hour == field.minute == field.second == field.microsecond == 0:
yield field.date()
else:
yield field
for unresolved_row in cursor:
row = list(resolve_cols(unresolved_row))
if fill_cache:
obj, index_end = get_cached_row(self.model, row, 0)
else:
obj = self.model(*row[:index_end])
for i, k in enumerate(extra_select):
setattr(obj, k[0], row[index_end+i])
yield obj
def _get_sql_clause(self):
from django.db.models.query import fill_table_cache, \
handle_legacy_orderlist, orderfield2column
opts = self.model._meta
# Construct the fundamental parts of the query: SELECT X FROM Y WHERE Z.
select = ["%s.%s" % (backend.quote_name(opts.db_table), backend.quote_name(f.column)) for f in opts.fields]
tables = [quote_only_if_word(t) for t in self._tables]
joins = SortedDict()
where = self._where[:]
params = self._params[:]
# Convert self._filters into SQL.
joins2, where2, params2 = self._filters.get_sql(opts)
joins.update(joins2)
where.extend(where2)
params.extend(params2)
# Add additional tables and WHERE clauses based on select_related.
if self._select_related:
fill_table_cache(opts, select, tables, where, opts.db_table, [opts.db_table])
# Add any additional SELECTs.
if self._select:
select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()])
# Start composing the body of the SQL statement.
sql = [" FROM", backend.quote_name(opts.db_table)]
# Compose the join dictionary into SQL describing the joins.
if joins:
sql.append(" ".join(["%s %s %s ON %s" % (join_type, table, alias, condition)
for (alias, (table, join_type, condition)) in joins.items()]))
# Compose the tables clause into SQL.
if tables:
sql.append(", " + ", ".join(tables))
# Compose the where clause into SQL.
if where:
sql.append(where and "WHERE " + " AND ".join(where))
# ORDER BY clause
order_by = []
if self._order_by is not None:
ordering_to_use = self._order_by
else:
ordering_to_use = opts.ordering
for f in handle_legacy_orderlist(ordering_to_use):
if f == '?': # Special case.
order_by.append(backend.get_random_function_sql())
else:
if f.startswith('-'):
col_name = f[1:]
order = "DESC"
else:
col_name = f
order = "ASC"
if "." in col_name:
table_prefix, col_name = col_name.split('.', 1)
table_prefix = backend.quote_name(table_prefix) + '.'
else:
# Use the database table as a column prefix if it wasn't given,
# and if the requested column isn't a custom SELECT.
if "." not in col_name and col_name not in (self._select or ()):
table_prefix = backend.quote_name(opts.db_table) + '.'
else:
table_prefix = ''
order_by.append('%s%s %s' % (table_prefix, backend.quote_name(orderfield2column(col_name, opts)), order))
if order_by:
sql.append("ORDER BY " + ", ".join(order_by))
# Look for column name collisions in the select elements
# and fix them with an AS alias. This allows us to do a
# SELECT * later in the paging query.
cols = [clause.split('.')[-1] for clause in select]
for index, col in enumerate(cols):
if cols.count(col) > 1:
col = '%s%d' % (col.replace('"', ''), index)
cols[index] = col
select[index] = '%s AS %s' % (select[index], col)
# LIMIT and OFFSET clauses
# To support limits and offsets, Oracle requires some funky rewriting of an otherwise normal looking query.
select_clause = ",".join(select)
distinct = (self._distinct and "DISTINCT " or "")
if order_by:
order_by_clause = " OVER (ORDER BY %s )" % (", ".join(order_by))
else:
#Oracle's row_number() function always requires an order-by clause.
#So we need to define a default order-by, since none was provided.
order_by_clause = " OVER (ORDER BY %s.%s)" % \
(backend.quote_name(opts.db_table),
backend.quote_name(opts.fields[0].db_column or opts.fields[0].column))
# limit_and_offset_clause
if self._limit is None:
assert self._offset is None, "'offset' is not allowed without 'limit'"
if self._offset is not None:
offset = int(self._offset)
else:
offset = 0
if self._limit is not None:
limit = int(self._limit)
else:
limit = None
limit_and_offset_clause = ''
if limit is not None:
limit_and_offset_clause = "WHERE rn > %s AND rn <= %s" % (offset, limit+offset)
elif offset:
limit_and_offset_clause = "WHERE rn > %s" % (offset)
if len(limit_and_offset_clause) > 0:
fmt = \
"""SELECT * FROM
(SELECT %s%s,
ROW_NUMBER()%s AS rn
%s)
%s"""
full_query = fmt % (distinct, select_clause,
order_by_clause, ' '.join(sql).strip(),
limit_and_offset_clause)
else:
full_query = None
return select, " ".join(sql), params, full_query
return OracleQuerySet

View File

@ -846,7 +846,7 @@ class TimeField(Field):
if value is not None:
# MySQL will throw a warning if microseconds are given, because it
# doesn't support microseconds.
if settings.DATABASE_ENGINE == 'mysql':
if settings.DATABASE_ENGINE == 'mysql' and hasattr(value, 'microsecond'):
value = value.replace(microsecond=0)
value = str(value)
elif settings.DATABASE_ENGINE == 'oracle':

View File

@ -169,7 +169,7 @@ class _QuerySet(object):
def iterator(self):
"Performs the SELECT database lookup of this QuerySet."
try:
select, sql, params, full_query = self._get_sql_clause()
select, sql, params = self._get_sql_clause()
except EmptyResultSet:
raise StopIteration
@ -218,7 +218,7 @@ class _QuerySet(object):
counter._limit = None
try:
select, sql, params, full_query = counter._get_sql_clause()
select, sql, params = counter._get_sql_clause()
except EmptyResultSet:
return 0
@ -548,12 +548,11 @@ class _QuerySet(object):
else:
assert self._offset is None, "'offset' is not allowed without 'limit'"
return select, " ".join(sql), params, None
return select, " ".join(sql), params
# Use the backend's QuerySet class if it defines one, otherwise use _QuerySet.
backend_query_module = get_query_module()
if hasattr(backend_query_module, 'get_query_set_class'):
QuerySet = backend_query_module.get_query_set_class(_QuerySet)
if hasattr(backend, 'get_query_set_class'):
QuerySet = backend.get_query_set_class(_QuerySet)
else:
QuerySet = _QuerySet
@ -566,7 +565,7 @@ class ValuesQuerySet(QuerySet):
def iterator(self):
try:
select, sql, params, full_query = self._get_sql_clause()
select, sql, params = self._get_sql_clause()
except EmptyResultSet:
raise StopIteration
@ -601,7 +600,7 @@ class DateQuerySet(QuerySet):
self._where.append('%s.%s IS NOT NULL' % \
(backend.quote_name(self.model._meta.db_table), backend.quote_name(self._field.column)))
try:
select, sql, params, full_query = self._get_sql_clause()
select, sql, params = self._get_sql_clause()
except EmptyResultSet:
raise StopIteration

docs/databases.txt (new file, 162 lines)
View File

@ -0,0 +1,162 @@
===============================
Notes About Supported Databases
===============================
Django attempts to support as many features as possible on all databases.
However, since not all database servers are identical, there are obviously
going to be some variations. This file describes some of the
features that might be relevant to Django usage. It is not intended as a
replacement for server-specific documentation or reference manuals.
MySQL Notes
===========
Django expects the database to support transactions, referential integrity,
and Unicode (UTF-8 encoding). Fortunately, MySQL_ has had all these
features available as far back as 3.23. While it may be possible to use
3.23 or 4.0, you will probably have less trouble if you use 4.1 or 5.0.
MySQL-4.1
---------
MySQL-4.1_ has greatly improved support for character sets. It is possible to
set different default character sets for the database, table, and column;
previous versions only have a server-wide character set setting. 4.1 is also the
first version where the character set can be changed on the fly. 4.1 also has
support for views, but these are not currently used by Django.
MySQL-5.0
---------
MySQL-5.0_ adds the ``information_schema`` database, which contains detailed
data on all database schemas. This is used by Django's ``inspectdb`` feature,
when it is available. 5.0 also has support for stored procedures, but these
are not currently used by Django.
.. _MySQL: http://www.mysql.com/
.. _MySQL-4.1: http://dev.mysql.com/doc/refman/4.1/en/index.html
.. _MySQL-5.0: http://dev.mysql.com/doc/refman/5.0/en/index.html
Storage Engines
---------------
MySQL has several `storage engines`_ (previously called table types). You can
change the default storage engine in the server configuration.
The default one is MyISAM_. The main drawback of MyISAM is that it does not
currently have support for transactions or foreign keys. On the plus side, it
is currently the only engine that supports full-text indexing and searching.
The InnoDB_ engine is fully transactional and supports foreign key references.
The BDB_ engine, like InnoDB, is also fully transactional and supports foreign
key references. However, its use seems to be somewhat deprecated.
`Other storage engines`_, including SolidDB_ and Falcon_, are on the horizon.
For now, InnoDB is probably your best choice.
.. _storage engines: http://dev.mysql.com/doc/refman/5.0/en/storage-engines.html
.. _MyISAM: http://dev.mysql.com/doc/refman/5.0/en/myisam-storage-engine.html
.. _BDB: http://dev.mysql.com/doc/refman/5.0/en/bdb-storage-engine.html
.. _InnoDB: http://dev.mysql.com/doc/refman/5.0/en/innodb.html
.. _Other storage engines: http://dev.mysql.com/doc/refman/5.1/en/storage-engines-other.html
.. _SolidDB: http://forge.mysql.com/projects/view.php?id=139
.. _Falcon: http://dev.mysql.com/doc/falcon/en/index.html
MySQLdb
-------
`MySQLdb`_ is the Python interface to MySQL. 1.2.1 is the first version which
has support for MySQL-4.1 and newer. If you are trying to use an older version
of MySQL, then 1.2.0 *may* work for you.
.. _MySQLdb: http://sourceforge.net/projects/mysql-python
Creating your database
~~~~~~~~~~~~~~~~~~~~~~
You can `create your database`_ using the command-line tools and this SQL::
CREATE DATABASE <dbname> CHARACTER SET utf8;
This ensures all tables and columns will use utf8 by default.
.. _create your database: http://dev.mysql.com/doc/refman/5.0/en/create-database.html
Connecting to the database
~~~~~~~~~~~~~~~~~~~~~~~~~~
Refer to the `settings documentation`_.
Connection settings are used in this order:
1. ``DATABASE_OPTIONS``
2. ``DATABASE_NAME``, ``DATABASE_USER``, ``DATABASE_PASSWORD``, ``DATABASE_HOST``,
``DATABASE_PORT``
3. MySQL option files.
In other words, if you set the name of the database in ``DATABASE_OPTIONS``,
this will take precedence over ``DATABASE_NAME``, which would override
anything in a `MySQL option file`_.
Here's a sample configuration which uses a MySQL option file::
# settings.py
DATABASE_ENGINE = "mysql"
DATABASE_OPTIONS = {
'read_default_file': '/path/to/my.cnf',
}
# my.cnf
[client]
database = DATABASE_NAME
user = DATABASE_USER
passwd = DATABASE_PASSWORD
default-character-set = utf8
There are several other MySQLdb connection options which may be useful, such
as ``ssl``, ``use_unicode``, ``init_command``, and ``sql_mode``; consult the
`MySQLdb documentation`_ for more details.
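For example, a hypothetical configuration that asks MySQLdb for strict
``sql_mode`` on every connection might look like this (a sketch only; pick
the options that suit your setup)::

    DATABASE_OPTIONS = {
        'sql_mode': 'TRADITIONAL',
    }

Everything in ``DATABASE_OPTIONS`` is passed straight through to
``MySQLdb.connect()``.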
.. _settings documentation: http://www.djangoproject.com/documentation/settings/#database-engine
.. _MySQL option file: http://dev.mysql.com/doc/refman/5.0/en/option-files.html
.. _MySQLdb documentation: http://mysql-python.sourceforge.net/
Creating your tables
~~~~~~~~~~~~~~~~~~~~
When Django generates the schema, it doesn't specify a storage engine, so tables
will be created with whatever default `storage engine`__ your database server
is configured for. The easiest solution is to set your database server's default
storage engine to the desired engine.
__ `storage engines`_
If you are using a hosting service and can't change your server's default
storage engine, you have a couple of options.
After a table is created, all that is needed to convert it to a new storage
engine (such as InnoDB) is::
ALTER TABLE <tablename> ENGINE=INNODB;
With a lot of tables, this can be tedious.
Another option is to use the ``init_command`` option for MySQLdb prior to
creating your tables::
DATABASE_OPTIONS = {
...
"init_command": "SET storage_engine=INNODB",
...
}
This sets the default storage engine upon connecting to the database. Once
your tables have been created and your application is running in production,
you should remove this option.
Another method for changing the storage engine is described in
AlterModelOnSyncDB_.
.. _AlterModelOnSyncDB: http://code.djangoproject.com/wiki/AlterModelOnSyncDB

View File

@ -146,7 +146,7 @@ False
# The underlying query only makes one join when a related table is referenced twice.
>>> query = Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
>>> null, sql, null, null = query._get_sql_clause()
>>> null, sql, null = query._get_sql_clause()
>>> sql.count('INNER JOIN')
1

View File

@ -54,7 +54,7 @@ def pk_create(pk, klass, data):
def data_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)
testcase.assertEqual(data, instance.data,
"Objects with PK=%d not equal; expected '%s', got '%s'" % (pk,data,instance.data))
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (pk,data, type(data), instance.data, type(instance.data)))
def fk_compare(testcase, pk, klass, data):
instance = klass.objects.get(id=pk)