Mirror of https://github.com/django/django.git (synced 2025-07-04 17:59:13 +00:00)
[soc2010/query-refactor] Merged up to trunk r13328.
git-svn-id: http://code.djangoproject.com/svn/django/branches/soc2010/query-refactor@13329 bcc190cf-cafb-0310-a4f2-bffc1f526a37
This commit is contained in:
parent 9d3e6668d9
commit a61b34b048
AUTHORS (1 addition)
@@ -220,6 +220,7 @@ answer newbie questions, and generally made Django that much better:
     Kieran Holland <http://www.kieranholland.com>
     Sung-Jin Hong <serialx.net@gmail.com>
     Leo "hylje" Honkanen <sealage@gmail.com>
+    Matt Hoskins <skaffenuk@googlemail.com>
     Tareque Hossain <http://www.codexn.com>
     Richard House <Richard.House@i-logue.com>
     Robert Rock Howard <http://djangomojo.com/>
@@ -28,5 +28,5 @@ DATETIME_INPUT_FORMATS = (
     '%Y-%m-%d', # '2006-10-25'
 )
 DECIMAL_SEPARATOR = ','
-THOUSAND_SEPARATOR = '.'
+THOUSAND_SEPARATOR = ' '
 NUMBER_GROUPING = 3
@@ -137,9 +137,8 @@ class BaseHandler(object):
                 raise
             except: # Handle everything else, including SuspiciousOperation, etc.
                 # Get the exception info now, in case another exception is thrown later.
-                exc_info = sys.exc_info()
                 receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
-                return self.handle_uncaught_exception(request, resolver, exc_info)
+                return self.handle_uncaught_exception(request, resolver, sys.exc_info())
         finally:
             # Reset URLconf for this thread on the way out for complete
             # isolation of request.urlconf
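A side note on the handler hunk above (an illustration only, not part of the commit): inside an except block, sys.exc_info() returns the exception currently being handled, which is why the intermediate exc_info variable can be dropped and the triple fetched at the point of use.

    import sys

    def current_exception_triple():
        # Inside an except block, sys.exc_info() returns the
        # (type, value, traceback) triple of the active exception.
        try:
            raise ValueError("boom")
        except Exception:
            exc_type, exc_value, exc_tb = sys.exc_info()
            return exc_type.__name__, str(exc_value)

    print(current_exception_triple())   # ('ValueError', 'boom')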
@@ -213,20 +213,24 @@ class BaseCommand(object):
             sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
             sys.exit(1)
         try:
+            self.stdout = options.get('stdout', sys.stdout)
+            self.stderr = options.get('stderr', sys.stderr)
             if self.requires_model_validation:
                 self.validate()
             output = self.handle(*args, **options)
             if output:
                 if self.output_transaction:
-                    # This needs to be imported here, because it relies on settings.
-                    from django.db import connection
+                    # This needs to be imported here, because it relies on
+                    # settings.
+                    from django.db import connections, DEFAULT_DB_ALIAS
+                    connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
                     if connection.ops.start_transaction_sql():
-                        print self.style.SQL_KEYWORD(connection.ops.start_transaction_sql())
-                print output
+                        self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()))
+                self.stdout.write(output)
                 if self.output_transaction:
-                    print self.style.SQL_KEYWORD("COMMIT;")
+                    self.stdout.write(self.style.SQL_KEYWORD("COMMIT;") + '\n')
         except CommandError, e:
-            sys.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
+            self.stderr.write(smart_str(self.style.ERROR('Error: %s\n' % e)))
             sys.exit(1)

     def validate(self, app=None, display_num_errors=False):
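The interesting part of the hunk above is that both the output streams and the database connection are now resolved from the options dict with sensible defaults, which is what lets callers inject their own. A minimal stand-in (not Django's BaseCommand) showing that pattern:

    import sys
    from StringIO import StringIO   # Python 2, matching this branch

    DEFAULT_DB_ALIAS = 'default'    # same alias name Django uses

    class FakeCommand(object):
        def execute(self, *args, **options):
            # Fall back to the process-wide streams and default database
            # unless the caller passed replacements in options.
            self.stdout = options.get('stdout', sys.stdout)
            alias = options.get('database', DEFAULT_DB_ALIAS)
            self.stdout.write("using connection %r\n" % alias)

    out = StringIO()
    FakeCommand().execute(stdout=out, database='replica')
    assert out.getvalue() == "using connection 'replica'\n"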
@@ -248,7 +252,7 @@ class BaseCommand(object):
             error_text = s.read()
             raise CommandError("One or more models did not validate:\n%s" % error_text)
         if display_num_errors:
-            print "%s error%s found" % (num_errors, num_errors != 1 and 's' or '')
+            self.stdout.write("%s error%s found\n" % (num_errors, num_errors != 1 and 's' or ''))

     def handle(self, *args, **options):
         """
@@ -112,10 +112,10 @@ class Command(BaseCommand):

         if formats:
             if verbosity > 1:
-                print "Loading '%s' fixtures..." % fixture_name
+                self.stdout.write("Loading '%s' fixtures...\n" % fixture_name)
         else:
             sys.stderr.write(
-                self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
+                self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format.\n" %
                     (fixture_name, format)))
             transaction.rollback(using=using)
             transaction.leave_transaction_management(using=using)
@@ -128,7 +128,7 @@ class Command(BaseCommand):

         for fixture_dir in fixture_dirs:
             if verbosity > 1:
-                print "Checking %s for fixtures..." % humanize(fixture_dir)
+                self.stdout.write("Checking %s for fixtures...\n" % humanize(fixture_dir))

             label_found = False
             for combo in product([using, None], formats, compression_formats):
@@ -141,16 +141,16 @@ class Command(BaseCommand):
                 )

                 if verbosity > 1:
-                    print "Trying %s for %s fixture '%s'..." % \
-                        (humanize(fixture_dir), file_name, fixture_name)
+                    self.stdout.write("Trying %s for %s fixture '%s'...\n" % \
+                        (humanize(fixture_dir), file_name, fixture_name))
                 full_path = os.path.join(fixture_dir, file_name)
                 open_method = compression_types[compression_format]
                 try:
                     fixture = open_method(full_path, 'r')
                     if label_found:
                         fixture.close()
-                        print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
-                            (fixture_name, humanize(fixture_dir)))
+                        self.stderr.write(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting.\n" %
+                            (fixture_name, humanize(fixture_dir))))
                         transaction.rollback(using=using)
                         transaction.leave_transaction_management(using=using)
                         return
@@ -158,8 +158,8 @@ class Command(BaseCommand):
                         fixture_count += 1
                         objects_in_fixture = 0
                         if verbosity > 0:
-                            print "Installing %s fixture '%s' from %s." % \
-                                (format, fixture_name, humanize(fixture_dir))
+                            self.stdout.write("Installing %s fixture '%s' from %s.\n" % \
+                                (format, fixture_name, humanize(fixture_dir)))
                         try:
                             objects = serializers.deserialize(format, fixture, using=using)
                             for obj in objects:
@@ -190,7 +190,7 @@ class Command(BaseCommand):
                             # error was encountered during fixture loading.
                             if objects_in_fixture == 0:
                                 sys.stderr.write(
-                                    self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
+                                    self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)\n" %
                                         (fixture_name)))
                                 transaction.rollback(using=using)
                                 transaction.leave_transaction_management(using=using)
@@ -198,8 +198,8 @@ class Command(BaseCommand):

                 except Exception, e:
                     if verbosity > 1:
-                        print "No %s fixture '%s' in %s." % \
-                            (format, fixture_name, humanize(fixture_dir))
+                        self.stdout.write("No %s fixture '%s' in %s.\n" % \
+                            (format, fixture_name, humanize(fixture_dir)))

         # If we found even one object in a fixture, we need to reset the
         # database sequences.
@@ -207,7 +207,7 @@ class Command(BaseCommand):
         sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
         if sequence_sql:
             if verbosity > 1:
-                print "Resetting sequences"
+                self.stdout.write("Resetting sequences\n")
             for line in sequence_sql:
                 cursor.execute(line)

@@ -217,10 +217,10 @@ class Command(BaseCommand):

         if object_count == 0:
             if verbosity > 0:
-                print "No fixtures found."
+                self.stdout.write("No fixtures found.\n")
         else:
             if verbosity > 0:
-                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)
+                self.stdout.write("Installed %d object(s) from %d fixture(s)\n" % (object_count, fixture_count))

         # Close the DB connection. This is required as a workaround for an
         # edge case in MySQL: if the same connection is used to
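For readers following the loaddata hunks: the product([using, None], formats, compression_formats) loop in the context above is what enumerates every candidate fixture file name. A small standalone illustration (the format and compression lists here are examples, not Django's exact values):

    from itertools import product

    using = 'default'
    formats = ['json', 'xml']
    compression_formats = [None, 'gz', 'zip']

    for database, format, compression in product([using, None], formats, compression_formats):
        parts = ['initial_data']
        if database:
            parts.append(database)      # e.g. initial_data.default.json
        parts.append(format)
        if compression:
            parts.append(compression)   # e.g. initial_data.json.gz
        print('.'.join(parts))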
@@ -353,6 +353,11 @@ class BaseDatabaseOperations(object):
         """
         return "BEGIN;"

+    def end_transaction_sql(self, success=True):
+        if not success:
+            return "ROLLBACK;"
+        return "COMMIT;"
+
     def tablespace_sql(self, tablespace, inline=False):
         """
         Returns the SQL that will be appended to tables or rows to define
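Sketch of how the new end_transaction_sql() hook pairs with the existing start_transaction_sql() when a command wraps its SQL output in a transaction (the Ops class below is a stand-in, not BaseDatabaseOperations itself):

    class Ops(object):
        def start_transaction_sql(self):
            return "BEGIN;"

        def end_transaction_sql(self, success=True):
            if not success:
                return "ROLLBACK;"
            return "COMMIT;"

    def wrap_in_transaction(ops, statements, success=True):
        # Bracket the statements with BEGIN and COMMIT/ROLLBACK.
        return [ops.start_transaction_sql()] + list(statements) + [ops.end_transaction_sql(success)]

    print(wrap_in_transaction(Ops(), ["UPDATE polls_poll SET opened = false;"], success=False))
    # ['BEGIN;', 'UPDATE polls_poll SET opened = false;', 'ROLLBACK;']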
@@ -1,4 +1,5 @@
 from django.db.backends.creation import BaseDatabaseCreation
+from django.db.backends.util import truncate_name

 class DatabaseCreation(BaseDatabaseCreation):
     # This dictionary maps Field objects to their associated PostgreSQL column
@@ -51,7 +52,7 @@ class DatabaseCreation(BaseDatabaseCreation):

         def get_index_sql(index_name, opclass=''):
             return (style.SQL_KEYWORD('CREATE INDEX') + ' ' +
-                    style.SQL_TABLE(qn(index_name)) + ' ' +
+                    style.SQL_TABLE(qn(truncate_name(index_name,self.connection.ops.max_name_length()))) + ' ' +
                     style.SQL_KEYWORD('ON') + ' ' +
                     style.SQL_TABLE(qn(db_table)) + ' ' +
                     "(%s%s)" % (style.SQL_FIELD(qn(f.column)), opclass) +
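Why truncate_name is pulled in above: PostgreSQL silently truncates identifiers longer than its limit, so Django now shortens generated index names itself before quoting them. The helper below is a simplified stand-in for django.db.backends.util.truncate_name, shown only to illustrate the idea (the real function appends a short hash of the trimmed part):

    import hashlib

    def truncate_name_sketch(name, length=63, hash_len=4):
        # Keep short names untouched; shorten long ones and append a hash
        # fragment so distinct long names stay distinct after truncation.
        if len(name) <= length:
            return name
        digest = hashlib.md5(name.encode('utf-8')).hexdigest()[:hash_len]
        return '%s%s' % (name[:length - hash_len], digest)

    long_index = 'some_application_model_with_a_very_long_name_and_field_name_idx_extra'
    print(len(truncate_name_sketch(long_index)))   # 63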
@@ -54,7 +54,9 @@ class DatabaseOperations(BaseDatabaseOperations):
         return '%s'

     def last_insert_id(self, cursor, table_name, pk_name):
-        cursor.execute("SELECT CURRVAL('\"%s_%s_seq\"')" % (table_name, pk_name))
+        # Use pg_get_serial_sequence to get the underlying sequence name
+        # from the table name and column name (available since PostgreSQL 8)
+        cursor.execute("SELECT CURRVAL(pg_get_serial_sequence('%s','%s'))" % (table_name, pk_name))
         return cursor.fetchone()[0]

     def no_limit_value(self):
@@ -90,13 +92,14 @@ class DatabaseOperations(BaseDatabaseOperations):
             for sequence_info in sequences:
                 table_name = sequence_info['table']
                 column_name = sequence_info['column']
-                if column_name and len(column_name) > 0:
-                    sequence_name = '%s_%s_seq' % (table_name, column_name)
-                else:
-                    sequence_name = '%s_id_seq' % table_name
-                sql.append("%s setval('%s', 1, false);" % \
+                if not (column_name and len(column_name) > 0):
+                    # This will be the case if it's an m2m using an autogenerated
+                    # intermediate table (see BaseDatabaseIntrospection.sequence_list)
+                    column_name = 'id'
+                sql.append("%s setval(pg_get_serial_sequence('%s','%s'), 1, false);" % \
                     (style.SQL_KEYWORD('SELECT'),
-                    style.SQL_FIELD(self.quote_name(sequence_name)))
+                    style.SQL_TABLE(table_name),
+                    style.SQL_FIELD(column_name))
                 )
             return sql
         else:
@@ -110,11 +113,15 @@ class DatabaseOperations(BaseDatabaseOperations):
             # Use `coalesce` to set the sequence for each model to the max pk value if there are records,
             # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
             # if there are records (as the max pk value is already in use), otherwise set it to false.
+            # Use pg_get_serial_sequence to get the underlying sequence name from the table name
+            # and column name (available since PostgreSQL 8)

             for f in model._meta.local_fields:
                 if isinstance(f, models.AutoField):
-                    output.append("%s setval('%s', coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
+                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
                         (style.SQL_KEYWORD('SELECT'),
-                        style.SQL_FIELD(qn('%s_%s_seq' % (model._meta.db_table, f.column))),
+                        style.SQL_TABLE(model._meta.db_table),
+                        style.SQL_FIELD(f.column),
                         style.SQL_FIELD(qn(f.column)),
                         style.SQL_FIELD(qn(f.column)),
                         style.SQL_KEYWORD('IS NOT'),
@@ -123,9 +130,10 @@ class DatabaseOperations(BaseDatabaseOperations):
                     break # Only one AutoField is allowed per model, so don't bother continuing.
             for f in model._meta.many_to_many:
                 if not f.rel.through:
-                    output.append("%s setval('%s', coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
+                    output.append("%s setval(pg_get_serial_sequence('%s','%s'), coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
                         (style.SQL_KEYWORD('SELECT'),
-                        style.SQL_FIELD(qn('%s_id_seq' % f.m2m_db_table())),
+                        style.SQL_TABLE(model._meta.db_table),
+                        style.SQL_FIELD('id'),
                         style.SQL_FIELD(qn('id')),
                         style.SQL_FIELD(qn('id')),
                         style.SQL_KEYWORD('IS NOT'),
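The common thread in the PostgreSQL hunks above: instead of hard-coding '<table>_<column>_seq', the generated SQL now asks PostgreSQL for the sequence that actually backs the serial column. A quick before/after of the generated statement (table and column names are invented for the example):

    table, column = 'polls_poll', 'id'

    old_sql = "SELECT setval('%s', 1, false);" % ('"%s_%s_seq"' % (table, column))
    new_sql = "SELECT setval(pg_get_serial_sequence('%s','%s'), 1, false);" % (table, column)

    print(old_sql)   # SELECT setval('"polls_poll_id_seq"', 1, false);
    print(new_sql)   # SELECT setval(pg_get_serial_sequence('polls_poll','id'), 1, false);
    # pg_get_serial_sequence resolves the sequence behind the column at run time,
    # so the statement keeps working even if the sequence name was truncated or renamed.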
@@ -177,14 +177,14 @@ class QueryDict(MultiValueDict):
         super(QueryDict, self).__delitem__(key)

     def __copy__(self):
-        result = self.__class__('', mutable=True)
+        result = self.__class__('', mutable=True, encoding=self.encoding)
         for key, value in dict.items(self):
             dict.__setitem__(result, key, value)
         return result

     def __deepcopy__(self, memo):
         import django.utils.copycompat as copy
-        result = self.__class__('', mutable=True)
+        result = self.__class__('', mutable=True, encoding=self.encoding)
         memo[id(self)] = result
         for key, value in dict.items(self):
             dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
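What the QueryDict change buys: a copy now keeps a non-default encoding instead of silently reverting to the default one. The toy class below only mirrors that one behaviour (it is not Django's QueryDict):

    import copy

    class ToyQueryDict(dict):
        def __init__(self, query_string='', mutable=False, encoding='utf-8'):
            super(ToyQueryDict, self).__init__()
            self.encoding = encoding

        def __copy__(self):
            # Forward the encoding, as the fix above does.
            result = self.__class__('', mutable=True, encoding=self.encoding)
            for key, value in self.items():
                result[key] = value
            return result

    q = ToyQueryDict(mutable=True, encoding='iso-8859-1')
    q['name'] = 'value'
    assert copy.copy(q).encoding == 'iso-8859-1'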
@@ -466,6 +466,9 @@ class TransactionTestCase(unittest.TestCase):
             msg_prefix + "Template '%s' was used unexpectedly in rendering"
             " the response" % template_name)

+    def assertQuerysetEqual(self, qs, values, transform=repr):
+        return self.assertEqual(map(transform, qs), values)
+
 def connections_support_transactions():
     """
     Returns True if all connections support transactions. This is messy
@@ -1,17 +1,17 @@
 """
 The md5 and sha modules are deprecated since Python 2.5, replaced by the
 hashlib module containing both hash algorithms. Here, we provide a common
-interface to the md5 and sha constructors, preferring the hashlib module when
-available.
+interface to the md5 and sha constructors, depending on system version.
 """

-try:
+import sys
+if sys.version_info >= (2, 5):
     import hashlib
     md5_constructor = hashlib.md5
     md5_hmac = md5_constructor
     sha_constructor = hashlib.sha1
     sha_hmac = sha_constructor
-except ImportError:
+else:
     import md5
     md5_constructor = md5.new
     md5_hmac = md5
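The shim above switches from "try hashlib, fall back on ImportError" to an explicit version check. A standalone equivalent of the resulting behaviour, shown only to spell the pattern out (callers keep importing md5_constructor/sha_constructor and never touch hashlib or the old md5/sha modules directly):

    import sys

    if sys.version_info >= (2, 5):
        import hashlib
        md5_constructor = hashlib.md5
        sha_constructor = hashlib.sha1
    else:
        # Python < 2.5: the stand-alone modules that hashlib replaced.
        import md5, sha
        md5_constructor = md5.new
        sha_constructor = sha.new

    print(md5_constructor(b'django').hexdigest())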
@@ -12,7 +12,7 @@ from django.utils.importlib import import_module
 from django.utils.encoding import smart_unicode, smart_str


-HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST')
+HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST|SIGNATURE')

 def linebreak_iter(template_source):
     yield 0
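A quick check of what the widened HIDDEN_SETTINGS pattern now masks in the debug view (the setting names below are invented for the example):

    import re

    HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST|SIGNATURE')

    for name in ('SECRET_KEY', 'DATABASE_PASSWORD', 'API_SIGNATURE_KEY', 'TIME_ZONE'):
        masked = bool(HIDDEN_SETTINGS.search(name))
        print('%s -> %s' % (name, 'hidden' if masked else 'shown'))
    # SECRET_KEY, DATABASE_PASSWORD and API_SIGNATURE_KEY are hidden; TIME_ZONE is shown.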
@@ -8,7 +8,7 @@ Writing custom django-admin commands

 Applications can register their own actions with ``manage.py``. For example,
 you might want to add a ``manage.py`` action for a Django app that you're
 distributing. In this document, we will be building a custom ``closepoll``
 command for the ``polls`` application from the
 :ref:`tutorial<intro-tutorial01>`.

@@ -62,9 +62,16 @@ look like this:
             poll.opened = False
             poll.save()

-            print 'Successfully closed poll "%s"' % poll_id
+            self.stdout.write('Successfully closed poll "%s"\n' % poll_id)

-The new custom command can be called using ``python manage.py closepoll
+.. note::
+    When you are using management commands and wish to provide console
+    output, you should write to ``self.stdout`` and ``self.stderr``,
+    instead of printing to ``stdout`` and ``stderr`` directly. By
+    using these proxies, it becomes much easier to test your custom
+    command.
+
+The new custom command can be called using ``python manage.py closepoll
 <poll_id>``.

 The ``handle()`` method takes zero or more ``poll_ids`` and sets ``poll.opened``
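The kind of test the new note has in mind, sketched with the tutorial's ``closepoll`` command: ``call_command`` forwards extra keyword options to the command, so the ``stdout`` override added in this commit reaches ``self.stdout``. This assumes a project where the tutorial's polls app is installed.

    from StringIO import StringIO          # Python 2, matching this branch
    from django.core.management import call_command

    def test_closepoll_output():
        out = StringIO()
        call_command('closepoll', '1', stdout=out)
        assert 'Successfully closed poll "1"' in out.getvalue()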
@@ -91,8 +98,8 @@ must be added to :attr:`~BaseCommand.option_list` like this:
     )
     # ...

 In addition to being able to add custom command line options, all
 :ref:`management commands<ref-django-admin>` can accept some
 default options such as :djadminopt:`--verbosity` and :djadminopt:`--traceback`.

 Command objects
@@ -113,7 +120,7 @@ Subclassing the :class:`BaseCommand` class requires that you implement the
 Attributes
 ----------

 All attributes can be set in your derived class and can be used in
 :class:`BaseCommand`'s :ref:`subclasses<ref-basecommand-subclasses>`.

 .. attribute:: BaseCommand.args
@@ -133,7 +140,7 @@ All attributes can be set in your derived class and can be used in
 .. attribute:: BaseCommand.help

     A short description of the command, which will be printed in the
     help message when the user runs the command
     ``python manage.py help <command>``.

 .. attribute:: BaseCommand.option_list
@@ -230,7 +237,7 @@ Rather than implementing :meth:`~BaseCommand.handle`, subclasses must implement
 A command which takes no arguments on the command line.

 Rather than implementing :meth:`~BaseCommand.handle`, subclasses must implement
 :meth:`~NoArgsCommand.handle_noargs`; :meth:`~BaseCommand.handle` itself is
 overridden to ensure no arguments are passed to the command.

 .. method:: NoArgsCommand.handle_noargs(**options)
@@ -1189,7 +1189,7 @@ your admin page for managing the relation.

 In all other respects, the ``InlineModelAdmin`` is exactly the same as any
 other. You can customize the appearance using any of the normal
-``InlineModelAdmin`` properties.
+``ModelAdmin`` properties.

 Working with Many-to-Many Intermediary Models
 ----------------------------------------------
@@ -441,7 +441,7 @@ to be displayed.

 See also ``DATETIME_INPUT_FORMATS`` and ``TIME_INPUT_FORMATS``.

-.. _datetime: http://docs.python.org/library/datetime.html#strftime-behavior
+.. _datetime: http://docs.python.org/library/datetime.html#strftime-strptime-behavior

 .. setting:: DATETIME_FORMAT

@@ -481,7 +481,7 @@ to be displayed.

 See also ``DATE_INPUT_FORMATS`` and ``TIME_INPUT_FORMATS``.

-.. _datetime: http://docs.python.org/library/datetime.html#strftime-behavior
+.. _datetime: http://docs.python.org/library/datetime.html#strftime-strptime-behavior

 .. setting:: DEBUG

@@ -494,8 +494,9 @@ A boolean that turns on/off debug mode.

 If you define custom settings, `django/views/debug.py`_ has a ``HIDDEN_SETTINGS``
 regular expression which will hide from the DEBUG view anything that contains
-``'SECRET'``, ``'PASSWORD'``, or ``'PROFANITIES'``. This allows untrusted users to
-be able to give backtraces without seeing sensitive (or offensive) settings.
+``'SECRET'``, ``'PASSWORD'``, ``'PROFANITIES'``, or ``'SIGNATURE'``. This allows
+untrusted users to be able to give backtraces without seeing sensitive (or
+offensive) settings.

 Still, note that there are always going to be sections of your debug output that
 are inappropriate for public consumption. File paths, configuration options, and
@@ -615,7 +616,7 @@ EMAIL_BACKEND

 .. versionadded:: 1.2

-Default: ``'django.core.mail.backends.smtp'``
+Default: ``'django.core.mail.backends.smtp.EmailBackend'``

 The backend to use for sending emails. For the list of available backends see
 :ref:`topics-email`.
@@ -1531,7 +1532,7 @@ to be displayed.

 See also ``DATE_INPUT_FORMATS`` and ``DATETIME_INPUT_FORMATS``.

-.. _datetime: http://docs.python.org/library/datetime.html#strftime-behavior
+.. _datetime: http://docs.python.org/library/datetime.html#strftime-strptime-behavior

 .. setting:: TIME_ZONE

@@ -1248,6 +1248,19 @@ cause of an failure in your test suite.
     ``target_status_code`` will be the url and status code for the final
     point of the redirect chain.

+.. method:: TestCase.assertQuerysetEqual(response, qs, values, transform=repr)
+
+    Asserts that a queryset ``qs`` returns a particular list of values ``values``.
+
+    The comparison of the contents of ``qs`` and ``values`` is performed using
+    the function ``transform``; by default, this means that the ``repr()`` of
+    each value is compared. Any other callable can be used if ``repr()`` doesn't
+    provide a unique or helpful comparison.
+
+    The comparison is also ordering dependent. If ``qs`` doesn't provide an
+    implicit ordering, you will need to apply a ``order_by()`` clause to your
+    queryset to ensure that the test will pass reliably.
+
 .. _topics-testing-email:

 E-mail services
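Example use of the new assertion, in the spirit of the tests added later in this commit (the Author model and initial_data.json fixture are the ones from the aggregation tests; the transform callable compares on names rather than on repr()):

    from django.test import TestCase
    from models import Author   # the aggregation test models in this commit

    class AuthorAssertionTests(TestCase):
        fixtures = ["initial_data.json"]

        def test_names_starting_with_j(self):
            self.assertQuerysetEqual(
                Author.objects.filter(name__startswith="J").order_by("name"),
                [u"Jacob Kaplan-Moss", u"James Bennett", u"Jeffrey Forcier"],
                transform=lambda a: a.name,
            )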
@@ -1,6 +1,7 @@
 # coding: utf-8
 from django.db import models

+
 class Author(models.Model):
     name = models.CharField(max_length=100)
     age = models.IntegerField()
@@ -39,323 +40,3 @@ class Store(models.Model):
     def __unicode__(self):
         return self.name

-# Tests on 'aggregate'
-# Different backends and numbers.
-__test__ = {'API_TESTS': """
->>> from django.core import management
->>> from decimal import Decimal
->>> from datetime import date
-
-# Reset the database representation of this app.
-# This will return the database to a clean initial state.
->>> management.call_command('flush', verbosity=0, interactive=False)
-
-# Empty Call - request nothing, get nothing.
->>> Author.objects.all().aggregate()
-{}
-
->>> from django.db.models import Avg, Sum, Count, Max, Min
-
-# Single model aggregation
-#
-
-# Single aggregate
-# Average age of Authors
->>> Author.objects.all().aggregate(Avg('age'))
-{'age__avg': 37.4...}
-
-# Multiple aggregates
-# Average and Sum of Author ages
->>> Author.objects.all().aggregate(Sum('age'), Avg('age'))
-{'age__sum': 337, 'age__avg': 37.4...}
-
-# Aggreates interact with filters, and only
-# generate aggregate values for the filtered values
-# Sum of the age of those older than 29 years old
->>> Author.objects.all().filter(age__gt=29).aggregate(Sum('age'))
-{'age__sum': 254}
-
-# Depth-1 Joins
-#
-
-# On Relationships with self
-# Average age of the friends of each author
->>> Author.objects.all().aggregate(Avg('friends__age'))
-{'friends__age__avg': 34.07...}
-
-# On ManyToMany Relationships
-#
-
-# Forward
-# Average age of the Authors of Books with a rating of less than 4.5
->>> Book.objects.all().filter(rating__lt=4.5).aggregate(Avg('authors__age'))
-{'authors__age__avg': 38.2...}
-
-# Backward
-# Average rating of the Books whose Author's name contains the letter 'a'
->>> Author.objects.all().filter(name__contains='a').aggregate(Avg('book__rating'))
-{'book__rating__avg': 4.0}
-
-# On OneToMany Relationships
-#
-
-# Forward
-# Sum of the number of awards of each Book's Publisher
->>> Book.objects.all().aggregate(Sum('publisher__num_awards'))
-{'publisher__num_awards__sum': 30}
-
-# Backward
-# Sum of the price of every Book that has a Publisher
->>> Publisher.objects.all().aggregate(Sum('book__price'))
-{'book__price__sum': Decimal("270.27")}
-
-# Multiple Joins
-#
-
-# Forward
->>> Store.objects.all().aggregate(Max('books__authors__age'))
-{'books__authors__age__max': 57}
-
-# Backward
-# Note that the very long default alias may be truncated
->>> Author.objects.all().aggregate(Min('book__publisher__num_awards'))
-{'book__publisher__num_award...': 1}
-
-# Aggregate outputs can also be aliased.
-
-# Average amazon.com Book rating
->>> Store.objects.filter(name='Amazon.com').aggregate(amazon_mean=Avg('books__rating'))
-{'amazon_mean': 4.08...}
-
-# Tests on annotate()
-
-# An empty annotate call does nothing but return the same QuerySet
->>> Book.objects.all().annotate().order_by('pk')
-[<Book: The Definitive Guide to Django: Web Development Done Right>, <Book: Sams Teach Yourself Django in 24 Hours>, <Book: Practical Django Projects>, <Book: Python Web Development with Django>, <Book: Artificial Intelligence: A Modern Approach>, <Book: Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp>]
-
-# Annotate inserts the alias into the model object with the aggregated result
->>> books = Book.objects.all().annotate(mean_age=Avg('authors__age'))
->>> books.get(pk=1).name
-u'The Definitive Guide to Django: Web Development Done Right'
-
->>> books.get(pk=1).mean_age
-34.5
-
-# On ManyToMany Relationships
-
-# Forward
-# Average age of the Authors of each book with a rating less than 4.5
->>> books = Book.objects.all().filter(rating__lt=4.5).annotate(Avg('authors__age'))
->>> sorted([(b.name, b.authors__age__avg) for b in books])
-[(u'Artificial Intelligence: A Modern Approach', 51.5), (u'Practical Django Projects', 29.0), (u'Python Web Development with Django', 30.3...), (u'Sams Teach Yourself Django in 24 Hours', 45.0)]
-
-# Count the number of authors of each book
->>> books = Book.objects.annotate(num_authors=Count('authors'))
->>> sorted([(b.name, b.num_authors) for b in books])
-[(u'Artificial Intelligence: A Modern Approach', 2), (u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1), (u'Practical Django Projects', 1), (u'Python Web Development with Django', 3), (u'Sams Teach Yourself Django in 24 Hours', 1), (u'The Definitive Guide to Django: Web Development Done Right', 2)]
-
-# Backward
-# Average rating of the Books whose Author's names contains the letter 'a'
->>> authors = Author.objects.all().filter(name__contains='a').annotate(Avg('book__rating'))
->>> sorted([(a.name, a.book__rating__avg) for a in authors])
-[(u'Adrian Holovaty', 4.5), (u'Brad Dayley', 3.0), (u'Jacob Kaplan-Moss', 4.5), (u'James Bennett', 4.0), (u'Paul Bissex', 4.0), (u'Stuart Russell', 4.0)]
-
-# Count the number of books written by each author
->>> authors = Author.objects.annotate(num_books=Count('book'))
->>> sorted([(a.name, a.num_books) for a in authors])
-[(u'Adrian Holovaty', 1), (u'Brad Dayley', 1), (u'Jacob Kaplan-Moss', 1), (u'James Bennett', 1), (u'Jeffrey Forcier', 1), (u'Paul Bissex', 1), (u'Peter Norvig', 2), (u'Stuart Russell', 1), (u'Wesley J. Chun', 1)]
-
-# On OneToMany Relationships
-
-# Forward
-# Annotate each book with the number of awards of each Book's Publisher
->>> books = Book.objects.all().annotate(Sum('publisher__num_awards'))
->>> sorted([(b.name, b.publisher__num_awards__sum) for b in books])
-[(u'Artificial Intelligence: A Modern Approach', 7), (u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9), (u'Practical Django Projects', 3), (u'Python Web Development with Django', 7), (u'Sams Teach Yourself Django in 24 Hours', 1), (u'The Definitive Guide to Django: Web Development Done Right', 3)]
-
-# Backward
-# Annotate each publisher with the sum of the price of all books sold
->>> publishers = Publisher.objects.all().annotate(Sum('book__price'))
->>> sorted([(p.name, p.book__price__sum) for p in publishers])
-[(u'Apress', Decimal("59.69")), (u"Jonno's House of Books", None), (u'Morgan Kaufmann', Decimal("75.00")), (u'Prentice Hall', Decimal("112.49")), (u'Sams', Decimal("23.09"))]
-
-# Calls to values() are not commutative over annotate().
-
-# Calling values on a queryset that has annotations returns the output
-# as a dictionary
->>> [sorted(o.iteritems()) for o in Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values()]
-[[('contact_id', 1), ('id', 1), ('isbn', u'159059725'), ('mean_age', 34.5), ('name', u'The Definitive Guide to Django: Web Development Done Right'), ('pages', 447), ('price', Decimal("30...")), ('pubdate', datetime.date(2007, 12, 6)), ('publisher_id', 1), ('rating', 4.5)]]
-
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
-[{'pk': 1, 'isbn': u'159059725', 'mean_age': 34.5}]
-
-# Calling values() with parameters reduces the output
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('name')
-[{'name': u'The Definitive Guide to Django: Web Development Done Right'}]
-
-# An empty values() call before annotating has the same effect as an
-# empty values() call after annotating
->>> [sorted(o.iteritems()) for o in Book.objects.filter(pk=1).values().annotate(mean_age=Avg('authors__age'))]
-[[('contact_id', 1), ('id', 1), ('isbn', u'159059725'), ('mean_age', 34.5), ('name', u'The Definitive Guide to Django: Web Development Done Right'), ('pages', 447), ('price', Decimal("30...")), ('pubdate', datetime.date(2007, 12, 6)), ('publisher_id', 1), ('rating', 4.5)]]
-
-# Calling annotate() on a ValuesQuerySet annotates over the groups of
-# fields to be selected by the ValuesQuerySet.
-
-# Note that an extra parameter is added to each dictionary. This
-# parameter is a queryset representing the objects that have been
-# grouped to generate the annotation
-
->>> Book.objects.all().values('rating').annotate(n_authors=Count('authors__id'), mean_age=Avg('authors__age')).order_by('rating')
-[{'rating': 3.0, 'n_authors': 1, 'mean_age': 45.0}, {'rating': 4.0, 'n_authors': 6, 'mean_age': 37.1...}, {'rating': 4.5, 'n_authors': 2, 'mean_age': 34.5}, {'rating': 5.0, 'n_authors': 1, 'mean_age': 57.0}]
-
-# If a join doesn't match any objects, an aggregate returns None
->>> authors = Author.objects.all().annotate(Avg('friends__age')).order_by('id')
->>> len(authors)
-9
->>> sorted([(a.name, a.friends__age__avg) for a in authors])
-[(u'Adrian Holovaty', 32.0), (u'Brad Dayley', None), (u'Jacob Kaplan-Moss', 29.5), (u'James Bennett', 34.0), (u'Jeffrey Forcier', 27.0), (u'Paul Bissex', 31.0), (u'Peter Norvig', 46.0), (u'Stuart Russell', 57.0), (u'Wesley J. Chun', 33.6...)]
-
-
-# The Count aggregation function allows an extra parameter: distinct.
-# This restricts the count results to unique items
->>> Book.objects.all().aggregate(Count('rating'))
-{'rating__count': 6}
-
->>> Book.objects.all().aggregate(Count('rating', distinct=True))
-{'rating__count': 4}
-
-# Retreiving the grouped objects
-
-# When using Count you can also omit the primary key and refer only to
-# the related field name if you want to count all the related objects
-# and not a specific column
->>> explicit = list(Author.objects.annotate(Count('book__id')))
->>> implicit = list(Author.objects.annotate(Count('book')))
->>> explicit == implicit
-True
-
-# Ordering is allowed on aggregates
->>> Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
-[{'rating': 4.5, 'oldest': 35}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.0, 'oldest': 57}, {'rating': 5.0, 'oldest': 57}]
-
->>> Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('-oldest', '-rating')
-[{'rating': 5.0, 'oldest': 57}, {'rating': 4.0, 'oldest': 57}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.5, 'oldest': 35}]
-
-# It is possible to aggregate over anotated values
->>> Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Avg('num_authors'))
-{'num_authors__avg': 1.66...}
-
-# You can filter the results based on the aggregation alias.
-
-# Lets add a publisher to test the different possibilities for filtering
->>> p = Publisher(name='Expensive Publisher', num_awards=0)
->>> p.save()
->>> Book(name='ExpensiveBook1', pages=1, isbn='111', rating=3.5, price=Decimal("1000"), publisher=p, contact_id=1, pubdate=date(2008,12,1)).save()
->>> Book(name='ExpensiveBook2', pages=1, isbn='222', rating=4.0, price=Decimal("1000"), publisher=p, contact_id=1, pubdate=date(2008,12,2)).save()
->>> Book(name='ExpensiveBook3', pages=1, isbn='333', rating=4.5, price=Decimal("35"), publisher=p, contact_id=1, pubdate=date(2008,12,3)).save()
-
-# Publishers that have:
-
-# (i) more than one book
->>> Publisher.objects.annotate(num_books=Count('book__id')).filter(num_books__gt=1).order_by('pk')
-[<Publisher: Apress>, <Publisher: Prentice Hall>, <Publisher: Expensive Publisher>]
-
-# (ii) a book that cost less than 40
->>> Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by('pk')
-[<Publisher: Apress>, <Publisher: Apress>, <Publisher: Sams>, <Publisher: Prentice Hall>, <Publisher: Expensive Publisher>]
-
-# (iii) more than one book and (at least) a book that cost less than 40
->>> Publisher.objects.annotate(num_books=Count('book__id')).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by('pk')
-[<Publisher: Apress>, <Publisher: Prentice Hall>, <Publisher: Expensive Publisher>]
-
-# (iv) more than one book that costs less than $40
->>> Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count('book__id')).filter(num_books__gt=1).order_by('pk')
-[<Publisher: Apress>]
-
-# Now a bit of testing on the different lookup types
-#
-
->>> Publisher.objects.annotate(num_books=Count('book')).filter(num_books__range=[1, 3]).order_by('pk')
-[<Publisher: Apress>, <Publisher: Sams>, <Publisher: Prentice Hall>, <Publisher: Morgan Kaufmann>, <Publisher: Expensive Publisher>]
-
->>> Publisher.objects.annotate(num_books=Count('book')).filter(num_books__range=[1, 2]).order_by('pk')
-[<Publisher: Apress>, <Publisher: Sams>, <Publisher: Prentice Hall>, <Publisher: Morgan Kaufmann>]
-
->>> Publisher.objects.annotate(num_books=Count('book')).filter(num_books__in=[1, 3]).order_by('pk')
-[<Publisher: Sams>, <Publisher: Morgan Kaufmann>, <Publisher: Expensive Publisher>]
-
->>> Publisher.objects.annotate(num_books=Count('book')).filter(num_books__isnull=True)
-[]
-
->>> p.delete()
-
-# Does Author X have any friends? (or better, how many friends does author X have)
->> Author.objects.filter(pk=1).aggregate(Count('friends__id'))
-{'friends__id__count': 2.0}
-
-# Give me a list of all Books with more than 1 authors
->>> Book.objects.all().annotate(num_authors=Count('authors__name')).filter(num_authors__ge=2).order_by('pk')
-[<Book: The Definitive Guide to Django: Web Development Done Right>, <Book: Artificial Intelligence: A Modern Approach>]
-
-# Give me a list of all Authors that have no friends
->>> Author.objects.all().annotate(num_friends=Count('friends__id', distinct=True)).filter(num_friends=0).order_by('pk')
-[<Author: Brad Dayley>]
-
-# Give me a list of all publishers that have published more than 1 books
->>> Publisher.objects.all().annotate(num_books=Count('book__id')).filter(num_books__gt=1).order_by('pk')
-[<Publisher: Apress>, <Publisher: Prentice Hall>]
-
-# Give me a list of all publishers that have published more than 1 books that cost less than 40
->>> Publisher.objects.all().filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count('book__id')).filter(num_books__gt=1)
-[<Publisher: Apress>]
-
-# Give me a list of all Books that were written by X and one other author.
->>> Book.objects.all().annotate(num_authors=Count('authors__id')).filter(authors__name__contains='Norvig', num_authors__gt=1)
-[<Book: Artificial Intelligence: A Modern Approach>]
-
-# Give me the average rating of all Books that were written by X and one other author.
-#(Aggregate over objects discovered using membership of the m2m set)
-
-# Adding an existing author to another book to test it the right way
->>> a = Author.objects.get(name__contains='Norvig')
->>> b = Book.objects.get(name__contains='Done Right')
->>> b.authors.add(a)
->>> b.save()
-
-# This should do it
->>> Book.objects.all().annotate(num_authors=Count('authors__id')).filter(authors__name__contains='Norvig', num_authors__gt=1).aggregate(Avg('rating'))
-{'rating__avg': 4.25}
->>> b.authors.remove(a)
-
-# Give me a list of all Authors that have published a book with at least one other person
-# (Filters over a count generated on a related object)
-#
-# Cheating: [a for a in Author.objects.all().annotate(num_coleagues=Count('book__authors__id'), num_books=Count('book__id', distinct=True)) if a.num_coleagues - a.num_books > 0]
-# F-Syntax is required. Will be fixed after F objects are available
-
-# Aggregates also work on dates, times and datetimes
->>> Publisher.objects.annotate(earliest_book=Min('book__pubdate')).exclude(earliest_book=None).order_by('earliest_book').values()
-[{'earliest_book': datetime.date(1991, 10, 15), 'num_awards': 9, 'id': 4, 'name': u'Morgan Kaufmann'}, {'earliest_book': datetime.date(1995, 1, 15), 'num_awards': 7, 'id': 3, 'name': u'Prentice Hall'}, {'earliest_book': datetime.date(2007, 12, 6), 'num_awards': 3, 'id': 1, 'name': u'Apress'}, {'earliest_book': datetime.date(2008, 3, 3), 'num_awards': 1, 'id': 2, 'name': u'Sams'}]
-
->>> Store.objects.aggregate(Max('friday_night_closing'), Min("original_opening"))
-{'friday_night_closing__max': datetime.time(23, 59, 59), 'original_opening__min': datetime.datetime(1945, 4, 25, 16, 24, 14)}
-
-# values_list() can also be used
-
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values_list('pk', 'isbn', 'mean_age')
-[(1, u'159059725', 34.5)]
-
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values_list('isbn')
-[(u'159059725',)]
-
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values_list('mean_age')
-[(34.5,)]
-
->>> Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values_list('mean_age', flat=True)
-[34.5]
-
->>> qs = Book.objects.values_list('price').annotate(count=Count('price')).order_by('-count', 'price')
->>> list(qs) == [(Decimal('29.69'), 2), (Decimal('23.09'), 1), (Decimal('30'), 1), (Decimal('75'), 1), (Decimal('82.8'), 1)]
-True
-
-"""}
tests/modeltests/aggregation/tests.py (new file, 578 lines)
@ -0,0 +1,578 @@
|
|||||||
|
import datetime
|
||||||
|
from decimal import Decimal
|
||||||
|
|
||||||
|
from django.db.models import Avg, Sum, Count, Max, Min
|
||||||
|
from django.test import TestCase
|
||||||
|
|
||||||
|
from models import Author, Publisher, Book, Store
|
||||||
|
|
||||||
|
|
||||||
|
class Approximate(object):
|
||||||
|
def __init__(self, val, places=7):
|
||||||
|
self.val = val
|
||||||
|
self.places = places
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return repr(self.val)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if self.val == other:
|
||||||
|
return True
|
||||||
|
return round(abs(self.val-other), self.places) == 0
|
||||||
|
|
||||||
|
class BaseAggregateTestCase(TestCase):
|
||||||
|
fixtures = ["initial_data.json"]
|
||||||
|
|
||||||
|
def test_empty_aggregate(self):
|
||||||
|
self.assertEqual(Author.objects.all().aggregate(), {})
|
||||||
|
|
||||||
|
def test_single_aggregate(self):
|
||||||
|
vals = Author.objects.aggregate(Avg("age"))
|
||||||
|
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
|
||||||
|
|
||||||
|
def test_multiple_aggregates(self):
|
||||||
|
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
|
||||||
|
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
|
||||||
|
|
||||||
|
def test_filter_aggregate(self):
|
||||||
|
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEqual(vals["age__sum"], 254)
|
||||||
|
|
||||||
|
def test_related_aggregate(self):
|
||||||
|
vals = Author.objects.aggregate(Avg("friends__age"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
|
||||||
|
|
||||||
|
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
|
||||||
|
|
||||||
|
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEqual(vals["book__rating__avg"], 4.0)
|
||||||
|
|
||||||
|
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEquals(vals["publisher__num_awards__sum"], 30)
|
||||||
|
|
||||||
|
vals = Publisher.objects.aggregate(Sum("book__price"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
|
||||||
|
|
||||||
|
def test_aggregate_multi_join(self):
|
||||||
|
vals = Store.objects.aggregate(Max("books__authors__age"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEqual(vals["books__authors__age__max"], 57)
|
||||||
|
|
||||||
|
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertEqual(vals["book__publisher__num_awards__min"], 1)
|
||||||
|
|
||||||
|
def test_aggregate_alias(self):
|
||||||
|
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
|
||||||
|
self.assertEqual(len(vals), 1)
|
||||||
|
self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
|
||||||
|
|
||||||
|
def test_annotate_basic(self):
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
Book.objects.annotate().order_by('pk'), [
|
||||||
|
"The Definitive Guide to Django: Web Development Done Right",
|
||||||
|
"Sams Teach Yourself Django in 24 Hours",
|
||||||
|
"Practical Django Projects",
|
||||||
|
"Python Web Development with Django",
|
||||||
|
"Artificial Intelligence: A Modern Approach",
|
||||||
|
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
|
||||||
|
],
|
||||||
|
lambda b: b.name
|
||||||
|
)
|
||||||
|
|
||||||
|
books = Book.objects.annotate(mean_age=Avg("authors__age"))
|
||||||
|
b = books.get(pk=1)
|
||||||
|
self.assertEqual(
|
||||||
|
b.name,
|
||||||
|
u'The Definitive Guide to Django: Web Development Done Right'
|
||||||
|
)
|
||||||
|
self.assertEqual(b.mean_age, 34.5)
|
||||||
|
|
||||||
|
def test_annotate_m2m(self):
|
||||||
|
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
books, [
|
||||||
|
(u'Artificial Intelligence: A Modern Approach', 51.5),
|
||||||
|
(u'Practical Django Projects', 29.0),
|
||||||
|
(u'Python Web Development with Django', Approximate(30.3, places=1)),
|
||||||
|
(u'Sams Teach Yourself Django in 24 Hours', 45.0)
|
||||||
|
],
|
||||||
|
lambda b: (b.name, b.authors__age__avg),
|
||||||
|
)
|
||||||
|
|
||||||
|
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
books, [
|
||||||
|
(u'Artificial Intelligence: A Modern Approach', 2),
|
||||||
|
(u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
|
||||||
|
(u'Practical Django Projects', 1),
|
||||||
|
(u'Python Web Development with Django', 3),
|
||||||
|
(u'Sams Teach Yourself Django in 24 Hours', 1),
|
||||||
|
(u'The Definitive Guide to Django: Web Development Done Right', 2)
|
||||||
|
],
|
||||||
|
lambda b: (b.name, b.num_authors)
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_backwards_m2m_annotate(self):
|
||||||
|
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
authors, [
|
||||||
|
(u'Adrian Holovaty', 4.5),
|
||||||
|
(u'Brad Dayley', 3.0),
|
||||||
|
(u'Jacob Kaplan-Moss', 4.5),
|
||||||
|
(u'James Bennett', 4.0),
|
||||||
|
(u'Paul Bissex', 4.0),
|
||||||
|
(u'Stuart Russell', 4.0)
|
||||||
|
],
|
||||||
|
lambda a: (a.name, a.book__rating__avg)
|
||||||
|
)
|
||||||
|
|
||||||
|
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
authors, [
|
||||||
|
(u'Adrian Holovaty', 1),
|
||||||
|
(u'Brad Dayley', 1),
|
||||||
|
(u'Jacob Kaplan-Moss', 1),
|
||||||
|
(u'James Bennett', 1),
|
||||||
|
(u'Jeffrey Forcier', 1),
|
||||||
|
(u'Paul Bissex', 1),
|
||||||
|
(u'Peter Norvig', 2),
|
||||||
|
(u'Stuart Russell', 1),
|
||||||
|
(u'Wesley J. Chun', 1)
|
||||||
|
],
|
||||||
|
lambda a: (a.name, a.num_books)
|
||||||
|
)
|
||||||
|
|
||||||
|
    def test_reverse_fkey_annotate(self):
        books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
        self.assertQuerysetEqual(
            books, [
                (u'Artificial Intelligence: A Modern Approach', 7),
                (u'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
                (u'Practical Django Projects', 3),
                (u'Python Web Development with Django', 7),
                (u'Sams Teach Yourself Django in 24 Hours', 1),
                (u'The Definitive Guide to Django: Web Development Done Right', 3)
            ],
            lambda b: (b.name, b.publisher__num_awards__sum)
        )

        publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
        self.assertQuerysetEqual(
            publishers, [
                (u'Apress', Decimal("59.69")),
                (u"Jonno's House of Books", None),
                (u'Morgan Kaufmann', Decimal("75.00")),
                (u'Prentice Hall', Decimal("112.49")),
                (u'Sams', Decimal("23.09"))
            ],
            lambda p: (p.name, p.book__price__sum)
        )

    def test_annotate_values(self):
        books = list(Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values())
        self.assertEqual(
            books, [
                {
                    "contact_id": 1,
                    "id": 1,
                    "isbn": "159059725",
                    "mean_age": 34.5,
                    "name": "The Definitive Guide to Django: Web Development Done Right",
                    "pages": 447,
                    "price": Approximate(Decimal("30")),
                    "pubdate": datetime.date(2007, 12, 6),
                    "publisher_id": 1,
                    "rating": 4.5,
                }
            ]
        )

        books = Book.objects.filter(pk=1).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
        self.assertEqual(
            list(books), [
                {
                    "pk": 1,
                    "isbn": "159059725",
                    "mean_age": 34.5,
                }
            ]
        )

        books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values("name")
        self.assertEqual(
            list(books), [
                {
                    "name": "The Definitive Guide to Django: Web Development Done Right"
                }
            ]
        )

        books = Book.objects.filter(pk=1).values().annotate(mean_age=Avg('authors__age'))
        self.assertEqual(
            list(books), [
                {
                    "contact_id": 1,
                    "id": 1,
                    "isbn": "159059725",
                    "mean_age": 34.5,
                    "name": "The Definitive Guide to Django: Web Development Done Right",
                    "pages": 447,
                    "price": Approximate(Decimal("30")),
                    "pubdate": datetime.date(2007, 12, 6),
                    "publisher_id": 1,
                    "rating": 4.5,
                }
            ]
        )

        books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
        self.assertEqual(
            list(books), [
                {
                    "rating": 3.0,
                    "n_authors": 1,
                    "mean_age": 45.0,
                },
                {
                    "rating": 4.0,
                    "n_authors": 6,
                    "mean_age": Approximate(37.16, places=1)
                },
                {
                    "rating": 4.5,
                    "n_authors": 2,
                    "mean_age": 34.5,
                },
                {
                    "rating": 5.0,
                    "n_authors": 1,
                    "mean_age": 57.0,
                }
            ]
        )

        authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
        self.assertEqual(len(authors), 9)
        self.assertQuerysetEqual(
            authors, [
                (u'Adrian Holovaty', 32.0),
                (u'Brad Dayley', None),
                (u'Jacob Kaplan-Moss', 29.5),
                (u'James Bennett', 34.0),
                (u'Jeffrey Forcier', 27.0),
                (u'Paul Bissex', 31.0),
                (u'Peter Norvig', 46.0),
                (u'Stuart Russell', 57.0),
                (u'Wesley J. Chun', Approximate(33.66, places=1))
            ],
            lambda a: (a.name, a.friends__age__avg)
        )

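The ordering of values() and annotate() exercised above changes what gets grouped; loosely put, values() before annotate() groups by the named fields and yields one row per group, while values() after annotate() only trims the columns of per-object results. A sketch under the same Book model assumption:

    from django.db.models import Avg

    # One dict per distinct rating, with the average computed per group.
    Book.objects.values("rating").annotate(mean_age=Avg("authors__age"))

    # One dict per book, annotated first and then reduced to two keys.
    Book.objects.annotate(mean_age=Avg("authors__age")).values("pk", "mean_age")
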
    def test_count(self):
        vals = Book.objects.aggregate(Count("rating"))
        self.assertEqual(vals, {"rating__count": 6})

        vals = Book.objects.aggregate(Count("rating", distinct=True))
        self.assertEqual(vals, {"rating__count": 4})

    def test_fkey_aggregate(self):
        explicit = list(Author.objects.annotate(Count('book__id')))
        implicit = list(Author.objects.annotate(Count('book')))
        self.assertEqual(explicit, implicit)

    def test_annotate_ordering(self):
        books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
        self.assertEqual(
            list(books), [
                {
                    "rating": 4.5,
                    "oldest": 35,
                },
                {
                    "rating": 3.0,
                    "oldest": 45
                },
                {
                    "rating": 4.0,
                    "oldest": 57,
                },
                {
                    "rating": 5.0,
                    "oldest": 57,
                }
            ]
        )

        books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
        self.assertEqual(
            list(books), [
                {
                    "rating": 5.0,
                    "oldest": 57,
                },
                {
                    "rating": 4.0,
                    "oldest": 57,
                },
                {
                    "rating": 3.0,
                    "oldest": 45,
                },
                {
                    "rating": 4.5,
                    "oldest": 35,
                }
            ]
        )

    def test_aggregate_annotation(self):
        vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
        self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})

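Aggregating over an annotation, as test_aggregate_annotation does, is effectively a two-stage computation: a per-book author count followed by an average of those counts across all books. A minimal sketch of the same idea written out stepwise (illustrative only):

    from django.db.models import Avg, Count

    annotated = Book.objects.annotate(num_authors=Count("authors__id"))
    annotated.aggregate(Avg("num_authors"))   # roughly {'num_authors__avg': 1.66...}
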
    def test_filtering(self):
        p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
        Book.objects.create(
            name='ExpensiveBook1',
            pages=1,
            isbn='111',
            rating=3.5,
            price=Decimal("1000"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 1)
        )
        Book.objects.create(
            name='ExpensiveBook2',
            pages=1,
            isbn='222',
            rating=4.0,
            price=Decimal("1000"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 2)
        )
        Book.objects.create(
            name='ExpensiveBook3',
            pages=1,
            isbn='333',
            rating=4.5,
            price=Decimal("35"),
            publisher=p,
            contact_id=1,
            pubdate=datetime.date(2008, 12, 3)
        )

        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )

        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Apress",
                "Sams",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name
        )

        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )

        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
            ],
            lambda p: p.name
        )

        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Sams",
                "Prentice Hall",
                "Morgan Kaufmann",
                "Expensive Publisher",
            ],
            lambda p: p.name
        )

        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Sams",
                "Prentice Hall",
                "Morgan Kaufmann",
            ],
            lambda p: p.name
        )

        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Sams",
                "Morgan Kaufmann",
                "Expensive Publisher",
            ],
            lambda p: p.name,
        )

        publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
        self.assertEqual(len(publishers), 0)

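The point test_filtering keeps circling is that the position of filter() relative to annotate() decides which rows feed the aggregate: a filter applied first restricts the joined rows being counted, while the same condition applied after annotate() only constrains the already-computed results. A hedged sketch assuming the Publisher and Book models from these tests:

    from decimal import Decimal
    from django.db.models import Count

    # Counts only books priced under 40.00, then keeps publishers with more than one such book.
    cheap_first = (Publisher.objects
        .filter(book__price__lt=Decimal("40.0"))
        .annotate(num_books=Count("book__id"))
        .filter(num_books__gt=1))

    # Counts every related book; the price condition is just an extra constraint on the join.
    count_first = (Publisher.objects
        .annotate(num_books=Count("book__id"))
        .filter(num_books__gt=1, book__price__lt=Decimal("40.0")))
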
    def test_annotation(self):
        vals = Author.objects.filter(pk=1).aggregate(Count("friends__id"))
        self.assertEqual(vals, {"friends__id__count": 2})

        books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
        self.assertQuerysetEqual(
            books, [
                "The Definitive Guide to Django: Web Development Done Right",
                "Artificial Intelligence: A Modern Approach",
            ],
            lambda b: b.name
        )

        authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
        self.assertQuerysetEqual(
            authors, [
                "Brad Dayley",
            ],
            lambda a: a.name
        )

        publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
                "Prentice Hall",
            ],
            lambda p: p.name
        )

        publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
        self.assertQuerysetEqual(
            publishers, [
                "Apress",
            ],
            lambda p: p.name
        )

        books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
        self.assertQuerysetEqual(
            books, [
                "Artificial Intelligence: A Modern Approach",
            ],
            lambda b: b.name
        )

    def test_more_aggregation(self):
        a = Author.objects.get(name__contains='Norvig')
        b = Book.objects.get(name__contains='Done Right')
        b.authors.add(a)
        b.save()

        vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
        self.assertEqual(vals, {"rating__avg": 4.25})

    def test_even_more_aggregate(self):
        publishers = Publisher.objects.annotate(earliest_book=Min("book__pubdate")).exclude(earliest_book=None).order_by("earliest_book").values()
        self.assertEqual(
            list(publishers), [
                {
                    'earliest_book': datetime.date(1991, 10, 15),
                    'num_awards': 9,
                    'id': 4,
                    'name': u'Morgan Kaufmann'
                },
                {
                    'earliest_book': datetime.date(1995, 1, 15),
                    'num_awards': 7,
                    'id': 3,
                    'name': u'Prentice Hall'
                },
                {
                    'earliest_book': datetime.date(2007, 12, 6),
                    'num_awards': 3,
                    'id': 1,
                    'name': u'Apress'
                },
                {
                    'earliest_book': datetime.date(2008, 3, 3),
                    'num_awards': 1,
                    'id': 2,
                    'name': u'Sams'
                }
            ]
        )

        vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
        self.assertEqual(
            vals,
            {
                "friday_night_closing__max": datetime.time(23, 59, 59),
                "original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
            }
        )

    def test_annotate_values_list(self):
        books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
        self.assertEqual(
            list(books), [
                (1, "159059725", 34.5),
            ]
        )

        books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("isbn")
        self.assertEqual(
            list(books), [
                ('159059725',)
            ]
        )

        books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
        self.assertEqual(
            list(books), [
                (34.5,)
            ]
        )

        books = Book.objects.filter(pk=1).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
        self.assertEqual(list(books), [34.5])

        books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
        self.assertEqual(
            list(books), [
                (Decimal("29.69"), 2),
                (Decimal('23.09'), 1),
                (Decimal('30'), 1),
                (Decimal('75'), 1),
                (Decimal('82.8'), 1),
            ]
        )

@@ -90,230 +90,3 @@ class Book(models.Model):

    class Meta:
        ordering = ('name',)

__test__ = {'API_TESTS': """
|
|
||||||
>>> from django.core import management
|
|
||||||
>>> from django.db.models import get_app
|
|
||||||
|
|
||||||
# Reset the database representation of this app.
|
|
||||||
# This will return the database to a clean initial state.
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Syncdb introduces 1 initial data object from initial_data.json.
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Load fixture 1. Single JSON file, with two objects.
|
|
||||||
>>> management.call_command('loaddata', 'fixture1.json', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as a JSON fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Try just dumping the contents of fixtures.Category
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Category', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]
|
|
||||||
|
|
||||||
# ...and just fixtures.Article
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Article', format='json')
|
|
||||||
[{"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# ...and both
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Category', 'fixtures.Article', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Specify a specific model twice
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Article', 'fixtures.Article', format='json')
|
|
||||||
[{"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Specify a dump that specifies Article both explicitly and implicitly
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Article', 'fixtures', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Same again, but specify in the reverse order
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', 'fixtures.Article', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Specify one model from one application, and an entire other application.
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.Category', 'sites', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]
|
|
||||||
|
|
||||||
# Load fixture 2. JSON file imported by default. Overwrites some existing objects
|
|
||||||
>>> management.call_command('loaddata', 'fixture2.json', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Load fixture 3, XML format.
|
|
||||||
>>> management.call_command('loaddata', 'fixture3.xml', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: XML identified as leading cause of cancer>, <Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker on TV is great!>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne.
|
|
||||||
>>> management.call_command('loaddata', 'fixture6.json', verbosity=0)
|
|
||||||
>>> Tag.objects.all()
|
|
||||||
[<Tag: <Article: Copyright is fine the way it is> tagged "copyright">, <Tag: <Article: Copyright is fine the way it is> tagged "law">]
|
|
||||||
|
|
||||||
# Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne.
|
|
||||||
>>> management.call_command('loaddata', 'fixture7.xml', verbosity=0)
|
|
||||||
>>> Tag.objects.all()
|
|
||||||
[<Tag: <Article: Copyright is fine the way it is> tagged "copyright">, <Tag: <Article: Copyright is fine the way it is> tagged "legal">, <Tag: <Article: Django conquers world!> tagged "django">, <Tag: <Article: Django conquers world!> tagged "world domination">]
|
|
||||||
|
|
||||||
# Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany.
|
|
||||||
>>> management.call_command('loaddata', 'fixture8.json', verbosity=0)
|
|
||||||
>>> Visa.objects.all()
|
|
||||||
[<Visa: Django Reinhardt Can add user, Can change user, Can delete user>, <Visa: Stephane Grappelli Can add user>, <Visa: Prince >]
|
|
||||||
|
|
||||||
# Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany.
|
|
||||||
>>> management.call_command('loaddata', 'fixture9.xml', verbosity=0)
|
|
||||||
>>> Visa.objects.all()
|
|
||||||
[<Visa: Django Reinhardt Can add user, Can change user, Can delete user>, <Visa: Stephane Grappelli Can add user, Can delete user>, <Visa: Artist formerly known as "Prince" Can change user>]
|
|
||||||
|
|
||||||
>>> Book.objects.all()
|
|
||||||
[<Book: Music for all ages by Artist formerly known as "Prince" and Django Reinhardt>]
|
|
||||||
|
|
||||||
# Load a fixture that doesn't exist
|
|
||||||
>>> management.call_command('loaddata', 'unknown.json', verbosity=0)
|
|
||||||
|
|
||||||
# object list is unaffected
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: XML identified as leading cause of cancer>, <Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker on TV is great!>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# By default, you get raw keys on dumpdata
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.book', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [3, 1]}}]
|
|
||||||
|
|
||||||
# But you can get natural keys if you ask for them and they are available
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures.book', format='json', use_natural_keys=True)
|
|
||||||
[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as a JSON fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='json', use_natural_keys=True)
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 5, "model": "fixtures.article", "fields": {"headline": "XML identified as leading cause of cancer", "pub_date": "2006-06-16 16:00:00"}}, {"pk": 4, "model": "fixtures.article", "fields": {"headline": "Django conquers world!", "pub_date": "2006-06-16 15:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16 14:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker on TV is great!", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "legal", "tagged_id": 3}}, {"pk": 3, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "django", "tagged_id": 4}}, {"pk": 4, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "world domination", "tagged_id": 4}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Artist formerly known as \\"Prince\\""}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 1, "model": "fixtures.visa", "fields": {"person": ["Django Reinhardt"], "permissions": [["add_user", "auth", "user"], ["change_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 2, "model": "fixtures.visa", "fields": {"person": ["Stephane Grappelli"], "permissions": [["add_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 3, "model": "fixtures.visa", "fields": {"person": ["Artist formerly known as \\"Prince\\""], "permissions": [["change_user", "auth", "user"]]}}, {"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as an XML fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='xml', use_natural_keys=True)
|
|
||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="5" model="fixtures.article"><field type="CharField" name="headline">XML identified as leading cause of cancer</field><field type="DateTimeField" name="pub_date">2006-06-16 16:00:00</field></object><object pk="4" model="fixtures.article"><field type="CharField" name="headline">Django conquers world!</field><field type="DateTimeField" name="pub_date">2006-06-16 15:00:00</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Copyright is fine the way it is</field><field type="DateTimeField" name="pub_date">2006-06-16 14:00:00</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker on TV is great!</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.article"><field type="CharField" name="headline">Python program becomes self aware</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">legal</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="3" model="fixtures.tag"><field type="CharField" name="name">django</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="4" model="fixtures.tag"><field type="CharField" name="name">world domination</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Artist formerly known as "Prince"</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object><object pk="1" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Django Reinhardt</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="2" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Stephane Grappelli</natural></field><field to="auth.permission" name="permissions" 
rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="3" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Artist formerly known as "Prince"</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="1" model="fixtures.book"><field type="CharField" name="name">Music for all ages</field><field to="fixtures.person" name="authors" rel="ManyToManyRel"><object><natural>Artist formerly known as "Prince"</natural></object><object><natural>Django Reinhardt</natural></object></field></object></django-objects>
|
|
||||||
|
|
||||||
"""}
|
|
||||||
|
|
||||||
# Database flushing does not work on MySQL with the default storage engine
|
|
||||||
# because it requires transaction support.
|
|
||||||
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
|
|
||||||
__test__['API_TESTS'] += \
|
|
||||||
"""
|
|
||||||
# Reset the database representation of this app. This will delete all data.
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Load fixture 1 again, using format discovery
|
|
||||||
>>> management.call_command('loaddata', 'fixture1', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Try to load fixture 2 using format discovery; this will fail
|
|
||||||
# because there are two fixture2's in the fixtures directory
|
|
||||||
>>> management.call_command('loaddata', 'fixture2', verbosity=0) # doctest: +ELLIPSIS
|
|
||||||
Multiple fixtures named 'fixture2' in '...fixtures'. Aborting.
|
|
||||||
|
|
||||||
# object list is unaffected
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as a JSON fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='json')
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]
|
|
||||||
|
|
||||||
# Load fixture 4 (compressed), using format discovery
|
|
||||||
>>> management.call_command('loaddata', 'fixture4', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Django pets kitten>, <Article: Time to reform copyright>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load fixture 4 (compressed), using format specification
|
|
||||||
>>> management.call_command('loaddata', 'fixture4.json', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Django pets kitten>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load fixture 5 (compressed), using format *and* compression specification
|
|
||||||
>>> management.call_command('loaddata', 'fixture5.json.zip', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: WoW subscribers now outnumber readers>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load fixture 5 (compressed), only compression specification
|
|
||||||
>>> management.call_command('loaddata', 'fixture5.zip', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: WoW subscribers now outnumber readers>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Try to load fixture 5 using format and compression discovery; this will fail
|
|
||||||
# because there are two fixture5's in the fixtures directory
|
|
||||||
>>> management.call_command('loaddata', 'fixture5', verbosity=0) # doctest: +ELLIPSIS
|
|
||||||
Multiple fixtures named 'fixture5' in '...fixtures'. Aborting.
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load db fixtures 1 and 2. These will load using the 'default' database identifier implicitly
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_1', verbosity=0)
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_2', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Who needs more than one database?>, <Article: Who needs to use compressed data?>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load db fixtures 1 and 2. These will load using the 'default' database identifier explicitly
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_1', verbosity=0, using='default')
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_2', verbosity=0, using='default')
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Who needs more than one database?>, <Article: Who needs to use compressed data?>, <Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Try to load db fixture 3. This won't load because the database identifier doesn't match
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_3', verbosity=0)
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('loaddata', 'db_fixture_3', verbosity=0, using='default')
|
|
||||||
>>> Article.objects.all()
|
|
||||||
[<Article: Python program becomes self aware>]
|
|
||||||
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
# Load back in fixture 1, we need the articles from it
|
|
||||||
>>> management.call_command('loaddata', 'fixture1', verbosity=0)
|
|
||||||
|
|
||||||
# Try to load fixture 6 using format discovery
|
|
||||||
>>> management.call_command('loaddata', 'fixture6', verbosity=0)
|
|
||||||
>>> Tag.objects.all()
|
|
||||||
[<Tag: <Article: Time to reform copyright> tagged "copyright">, <Tag: <Article: Time to reform copyright> tagged "law">]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as a JSON fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='json', use_natural_keys=True)
|
|
||||||
[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "law", "tagged_id": 3}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}]
|
|
||||||
|
|
||||||
# Dump the current contents of the database as an XML fixture
|
|
||||||
>>> management.call_command('dumpdata', 'fixtures', format='xml', use_natural_keys=True)
|
|
||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Time to reform copyright</field><field type="DateTimeField" name="pub_date">2006-06-16 13:00:00</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker has no place on ESPN</field><field type="DateTimeField" name="pub_date">2006-06-16 12:00:00</field></object><object pk="1" model="fixtures.article"><field type="CharField" name="headline">Python program becomes self aware</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">law</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Prince</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object></django-objects>
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from django.test import TestCase
|
|
||||||
|
|
||||||
class SampleTestCase(TestCase):
|
|
||||||
fixtures = ['fixture1.json', 'fixture2.json']
|
|
||||||
|
|
||||||
def testClassFixtures(self):
|
|
||||||
"Check that test case has installed 4 fixture objects"
|
|
||||||
self.assertEqual(Article.objects.count(), 4)
|
|
||||||
self.assertEquals(str(Article.objects.all()), "[<Article: Django conquers world!>, <Article: Copyright is fine the way it is>, <Article: Poker has no place on ESPN>, <Article: Python program becomes self aware>]")
|
|
||||||
|
277
tests/modeltests/fixtures/tests.py
Normal file
@@ -0,0 +1,277 @@
import StringIO
import sys

from django.test import TestCase, TransactionTestCase
from django.conf import settings
from django.core import management
from django.db import DEFAULT_DB_ALIAS

from models import Article, Blog, Book, Category, Person, Tag, Visa

class TestCaseFixtureLoadingTests(TestCase):
    fixtures = ['fixture1.json', 'fixture2.json']

    def testClassFixtures(self):
        "Check that test case has installed 4 fixture objects"
        self.assertEqual(Article.objects.count(), 4)
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: Django conquers world!>',
            '<Article: Copyright is fine the way it is>',
            '<Article: Poker has no place on ESPN>',
            '<Article: Python program becomes self aware>'
        ])

class FixtureLoadingTests(TestCase):

    def _dumpdata_assert(self, args, output, format='json', natural_keys=False):
        new_io = StringIO.StringIO()
        management.call_command('dumpdata', *args, **{'format':format, 'stdout':new_io, 'use_natural_keys':natural_keys})
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, output)

    def test_initial_data(self):
        # Syncdb introduces 1 initial data object from initial_data.json.
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: Python program becomes self aware>'
        ])

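The _dumpdata_assert helper relies on call_command accepting a stdout option, so any in-memory file-like object can capture a management command's output instead of the console. A short usage sketch along the same lines (illustrative only, not part of the new test file):

    import StringIO
    from django.core import management

    buf = StringIO.StringIO()
    management.call_command('dumpdata', 'fixtures.Category', format='json', stdout=buf)
    print buf.getvalue().strip()   # the JSON the command would otherwise print to the console
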
    def test_loading_and_dumping(self):
        new_io = StringIO.StringIO()

        # Load fixture 1. Single JSON file, with two objects.
        management.call_command('loaddata', 'fixture1.json', verbosity=0, commit=False)
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: Time to reform copyright>',
            '<Article: Poker has no place on ESPN>',
            '<Article: Python program becomes self aware>'
        ])

        # Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Try just dumping the contents of fixtures.Category
|
||||||
|
self._dumpdata_assert(['fixtures.Category'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}]')
|
||||||
|
|
||||||
|
# ...and just fixtures.Article
|
||||||
|
self._dumpdata_assert(['fixtures.Article'], '[{"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# ...and both
|
||||||
|
self._dumpdata_assert(['fixtures.Category', 'fixtures.Article'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Specify a specific model twice
|
||||||
|
self._dumpdata_assert(['fixtures.Article', 'fixtures.Article'], '[{"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Specify a dump that specifies Article both explicitly and implicitly
|
||||||
|
self._dumpdata_assert(['fixtures.Article', 'fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Same again, but specify in the reverse order
|
||||||
|
self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Specify one model from one application, and an entire other application.
|
||||||
|
self._dumpdata_assert(['fixtures.Category', 'sites'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]')
|
||||||
|
|
||||||
|
        # Load fixture 2. JSON file imported by default. Overwrites some existing objects
        management.call_command('loaddata', 'fixture2.json', verbosity=0, commit=False)
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: Django conquers world!>',
            '<Article: Copyright is fine the way it is>',
            '<Article: Poker has no place on ESPN>',
            '<Article: Python program becomes self aware>'
        ])

        # Load fixture 3, XML format.
        management.call_command('loaddata', 'fixture3.xml', verbosity=0, commit=False)
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: XML identified as leading cause of cancer>',
            '<Article: Django conquers world!>',
            '<Article: Copyright is fine the way it is>',
            '<Article: Poker on TV is great!>',
            '<Article: Python program becomes self aware>'
        ])

        # Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne.
        management.call_command('loaddata', 'fixture6.json', verbosity=0, commit=False)
        self.assertQuerysetEqual(Tag.objects.all(), [
            '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
            '<Tag: <Article: Copyright is fine the way it is> tagged "law">'
        ])

        # Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne.
        management.call_command('loaddata', 'fixture7.xml', verbosity=0, commit=False)
        self.assertQuerysetEqual(Tag.objects.all(), [
            '<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
            '<Tag: <Article: Copyright is fine the way it is> tagged "legal">',
            '<Tag: <Article: Django conquers world!> tagged "django">',
            '<Tag: <Article: Django conquers world!> tagged "world domination">'
        ])

        # Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany.
        management.call_command('loaddata', 'fixture8.json', verbosity=0, commit=False)
        self.assertQuerysetEqual(Visa.objects.all(), [
            '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>',
            '<Visa: Stephane Grappelli Can add user>',
            '<Visa: Prince >'
        ])

        # Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany.
        management.call_command('loaddata', 'fixture9.xml', verbosity=0, commit=False)
        self.assertQuerysetEqual(Visa.objects.all(), [
            '<Visa: Django Reinhardt Can add user, Can change user, Can delete user>',
            '<Visa: Stephane Grappelli Can add user, Can delete user>',
            '<Visa: Artist formerly known as "Prince" Can change user>'
        ])

        self.assertQuerysetEqual(Book.objects.all(), [
            '<Book: Music for all ages by Artist formerly known as "Prince" and Django Reinhardt>'
        ])

        # Load a fixture that doesn't exist
        management.call_command('loaddata', 'unknown.json', verbosity=0, commit=False)

        # object list is unaffected
        self.assertQuerysetEqual(Article.objects.all(), [
            '<Article: XML identified as leading cause of cancer>',
            '<Article: Django conquers world!>',
            '<Article: Copyright is fine the way it is>',
            '<Article: Poker on TV is great!>',
            '<Article: Python program becomes self aware>'
        ])

        # By default, you get raw keys on dumpdata
        self._dumpdata_assert(['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [3, 1]}}]')

        # But you can get natural keys if you ask for them and they are available
        self._dumpdata_assert(['fixtures.book'], '[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]', natural_keys=True)

        # Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 5, "model": "fixtures.article", "fields": {"headline": "XML identified as leading cause of cancer", "pub_date": "2006-06-16 16:00:00"}}, {"pk": 4, "model": "fixtures.article", "fields": {"headline": "Django conquers world!", "pub_date": "2006-06-16 15:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16 14:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker on TV is great!", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "legal", "tagged_id": 3}}, {"pk": 3, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "django", "tagged_id": 4}}, {"pk": 4, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "world domination", "tagged_id": 4}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Artist formerly known as \\"Prince\\""}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 1, "model": "fixtures.visa", "fields": {"person": ["Django Reinhardt"], "permissions": [["add_user", "auth", "user"], ["change_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 2, "model": "fixtures.visa", "fields": {"person": ["Stephane Grappelli"], "permissions": [["add_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 3, "model": "fixtures.visa", "fields": {"person": ["Artist formerly known as \\"Prince\\""], "permissions": [["change_user", "auth", "user"]]}}, {"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]', natural_keys=True)
|
||||||
|
|
||||||
|
# Dump the current contents of the database as an XML fixture
|
||||||
|
self._dumpdata_assert(['fixtures'], """<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="5" model="fixtures.article"><field type="CharField" name="headline">XML identified as leading cause of cancer</field><field type="DateTimeField" name="pub_date">2006-06-16 16:00:00</field></object><object pk="4" model="fixtures.article"><field type="CharField" name="headline">Django conquers world!</field><field type="DateTimeField" name="pub_date">2006-06-16 15:00:00</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Copyright is fine the way it is</field><field type="DateTimeField" name="pub_date">2006-06-16 14:00:00</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker on TV is great!</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.article"><field type="CharField" name="headline">Python program becomes self aware</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">legal</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="3" model="fixtures.tag"><field type="CharField" name="name">django</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="4" model="fixtures.tag"><field type="CharField" name="name">world domination</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Artist formerly known as "Prince"</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object><object pk="1" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Django Reinhardt</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="2" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Stephane Grappelli</natural></field><field to="auth.permission" name="permissions" 
rel="ManyToManyRel"><object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="3" model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Artist formerly known as "Prince"</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>change_user</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="1" model="fixtures.book"><field type="CharField" name="name">Music for all ages</field><field to="fixtures.person" name="authors" rel="ManyToManyRel"><object><natural>Artist formerly known as "Prince"</natural></object><object><natural>Django Reinhardt</natural></object></field></object></django-objects>""", format='xml', natural_keys=True)
|
||||||
|
|
||||||
|
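The XML assertion above round-trips the fixtures app with natural keys enabled. As a hedged aside (the _dumpdata_assert helper and the fixtures models are specific to this suite, and the use_natural_keys option name is assumed for this Django version), the equivalent dump can be produced directly:

from StringIO import StringIO

from django.core import management

def dump_fixtures_app_with_natural_keys():
    # Serialize the 'fixtures' app to XML; related objects are referenced by
    # their natural keys instead of raw primary keys.
    out = StringIO()
    management.call_command('dumpdata', 'fixtures', format='xml',
                            use_natural_keys=True, stdout=out)
    return out.getvalue().strip()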
def test_compress_format_loading(self):
|
||||||
|
# Load fixture 4 (compressed), using format specification
|
||||||
|
management.call_command('loaddata', 'fixture4.json', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Django pets kitten>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_compressed_specified_loading(self):
|
||||||
|
# Load fixture 5 (compressed), using format *and* compression specification
|
||||||
|
management.call_command('loaddata', 'fixture5.json.zip', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: WoW subscribers now outnumber readers>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_compressed_loading(self):
|
||||||
|
# Load fixture 5 (compressed), only compression specification
|
||||||
|
management.call_command('loaddata', 'fixture5.zip', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: WoW subscribers now outnumber readers>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_ambiguous_compressed_fixture(self):
|
||||||
|
# The name "fixture5" is ambigous, so loading it will raise an error
|
||||||
|
new_io = StringIO.StringIO()
|
||||||
|
management.call_command('loaddata', 'fixture5', verbosity=0, stderr=new_io, commit=False)
|
||||||
|
output = new_io.getvalue().strip().split('\n')
|
||||||
|
self.assertEqual(len(output), 1)
|
||||||
|
self.assertTrue(output[0].startswith("Multiple fixtures named 'fixture5'"))
|
||||||
|
|
||||||
|
def test_db_loading(self):
|
||||||
|
# Load db fixtures 1 and 2. These will load using the 'default' database identifier implicitly
|
||||||
|
management.call_command('loaddata', 'db_fixture_1', verbosity=0, commit=False)
|
||||||
|
management.call_command('loaddata', 'db_fixture_2', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Who needs more than one database?>',
|
||||||
|
'<Article: Who needs to use compressed data?>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_loading_using(self):
|
||||||
|
# Load db fixtures 1 and 2. These will load using the 'default' database identifier explicitly
|
||||||
|
management.call_command('loaddata', 'db_fixture_1', verbosity=0, using='default', commit=False)
|
||||||
|
management.call_command('loaddata', 'db_fixture_2', verbosity=0, using='default', commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Who needs more than one database?>',
|
||||||
|
'<Article: Who needs to use compressed data?>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_unmatched_identifier_loading(self):
|
||||||
|
# Try to load db fixture 3. This won't load because the database identifier doesn't match
|
||||||
|
management.call_command('loaddata', 'db_fixture_3', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
management.call_command('loaddata', 'db_fixture_3', verbosity=0, using='default', commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_output_formats(self):
|
||||||
|
# Load back in fixture 1, we need the articles from it
|
||||||
|
management.call_command('loaddata', 'fixture1', verbosity=0, commit=False)
|
||||||
|
|
||||||
|
# Try to load fixture 6 using format discovery
|
||||||
|
management.call_command('loaddata', 'fixture6', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Tag.objects.all(), [
|
||||||
|
'<Tag: <Article: Time to reform copyright> tagged "copyright">',
|
||||||
|
'<Tag: <Article: Time to reform copyright> tagged "law">'
|
||||||
|
])
|
||||||
|
|
||||||
|
# Dump the current contents of the database as a JSON fixture
|
||||||
|
self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}, {"pk": 1, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": 3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "law", "tagged_id": 3}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}]', natural_keys=True)
|
||||||
|
|
||||||
|
# Dump the current contents of the database as an XML fixture
|
||||||
|
self._dumpdata_assert(['fixtures'], """<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<django-objects version="1.0"><object pk="1" model="fixtures.category"><field type="CharField" name="title">News Stories</field><field type="TextField" name="description">Latest news stories</field></object><object pk="3" model="fixtures.article"><field type="CharField" name="headline">Time to reform copyright</field><field type="DateTimeField" name="pub_date">2006-06-16 13:00:00</field></object><object pk="2" model="fixtures.article"><field type="CharField" name="headline">Poker has no place on ESPN</field><field type="DateTimeField" name="pub_date">2006-06-16 12:00:00</field></object><object pk="1" model="fixtures.article"><field type="CharField" name="headline">Python program becomes self aware</field><field type="DateTimeField" name="pub_date">2006-06-16 11:00:00</field></object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">law</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field></object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">Prince</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli</field></object></django-objects>""", format='xml', natural_keys=True)
|
||||||
|
|
||||||
|
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
|
||||||
|
class FixtureTransactionTests(TransactionTestCase):
|
||||||
|
def _dumpdata_assert(self, args, output, format='json'):
|
||||||
|
new_io = StringIO.StringIO()
|
||||||
|
management.call_command('dumpdata', *args, **{'format':format, 'stdout':new_io})
|
||||||
|
command_output = new_io.getvalue().strip()
|
||||||
|
self.assertEqual(command_output, output)
|
||||||
|
|
||||||
|
def test_format_discovery(self):
|
||||||
|
# Load fixture 1 again, using format discovery
|
||||||
|
management.call_command('loaddata', 'fixture1', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Time to reform copyright>',
|
||||||
|
'<Article: Poker has no place on ESPN>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
# Try to load fixture 2 using format discovery; this will fail
|
||||||
|
# because there are two fixture2's in the fixtures directory
|
||||||
|
new_io = StringIO.StringIO()
|
||||||
|
management.call_command('loaddata', 'fixture2', verbosity=0, stderr=new_io)
|
||||||
|
output = new_io.getvalue().strip().split('\n')
|
||||||
|
self.assertEqual(len(output), 1)
|
||||||
|
self.assertTrue(output[0].startswith("Multiple fixtures named 'fixture2'"))
|
||||||
|
|
||||||
|
# object list is unaffected
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Time to reform copyright>',
|
||||||
|
'<Article: Poker has no place on ESPN>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
||||||
|
|
||||||
|
# Dump the current contents of the database as a JSON fixture
|
||||||
|
self._dumpdata_assert(['fixtures'], '[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": "News Stories"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16 13:00:00"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", "pub_date": "2006-06-16 12:00:00"}}, {"pk": 1, "model": "fixtures.article", "fields": {"headline": "Python program becomes self aware", "pub_date": "2006-06-16 11:00:00"}}]')
|
||||||
|
|
||||||
|
# Load fixture 4 (compressed), using format discovery
|
||||||
|
management.call_command('loaddata', 'fixture4', verbosity=0, commit=False)
|
||||||
|
self.assertQuerysetEqual(Article.objects.all(), [
|
||||||
|
'<Article: Django pets kitten>',
|
||||||
|
'<Article: Time to reform copyright>',
|
||||||
|
'<Article: Poker has no place on ESPN>',
|
||||||
|
'<Article: Python program becomes self aware>'
|
||||||
|
])
|
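The block above walks loaddata through its discovery rules: an explicit format ('fixture4.json'), format plus compression ('fixture5.json.zip'), compression only ('fixture5.zip'), and finally an ambiguous bare name ('fixture5') that is reported on stderr. A hedged sketch of the same calls outside the test harness (fixture names are the ones shipped with this suite; commit=False only applies inside a test case):

from StringIO import StringIO

from django.core import management

def load_sample_fixtures():
    # Explicit serialization format: only fixture4.json is considered.
    management.call_command('loaddata', 'fixture4.json', verbosity=0)
    # Explicit format plus compression suffix.
    management.call_command('loaddata', 'fixture5.json.zip', verbosity=0)
    # Compression suffix only; the format is discovered from the archive member.
    management.call_command('loaddata', 'fixture5.zip', verbosity=0)
    # A bare ambiguous name is reported on stderr rather than raising.
    err = StringIO()
    management.call_command('loaddata', 'fixture5', verbosity=0, stderr=err)
    return err.getvalue().strip()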
@ -23,9 +23,9 @@ class ProxyModelInheritanceTests(TransactionTestCase):
|
|||||||
settings.INSTALLED_APPS = ('app1', 'app2')
|
settings.INSTALLED_APPS = ('app1', 'app2')
|
||||||
map(load_app, settings.INSTALLED_APPS)
|
map(load_app, settings.INSTALLED_APPS)
|
||||||
call_command('syncdb', verbosity=0)
|
call_command('syncdb', verbosity=0)
|
||||||
|
global ProxyModel, NiceModel
|
||||||
from app1.models import ProxyModel
|
from app1.models import ProxyModel
|
||||||
from app2.models import NiceModel
|
from app2.models import NiceModel
|
||||||
global ProxyModel, NiceModel
|
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
settings.INSTALLED_APPS = self.old_installed_apps
|
settings.INSTALLED_APPS = self.old_installed_apps
|
||||||
|
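The setUp/tearDown pair above temporarily swaps INSTALLED_APPS so that app1 and app2 exist only for the duration of the test. A hedged sketch of that pattern in isolation (app labels taken from this test; remembering the old value in setUp is assumed):

from django.conf import settings
from django.core.management import call_command
from django.db.models.loading import load_app
from django.test import TransactionTestCase

class SwappedAppsTests(TransactionTestCase):
    def setUp(self):
        # Remember the real app list so tearDown can restore it.
        self.old_installed_apps = settings.INSTALLED_APPS
        settings.INSTALLED_APPS = ('app1', 'app2')
        map(load_app, settings.INSTALLED_APPS)
        # Create the tables for the temporarily installed apps.
        call_command('syncdb', verbosity=0)

    def tearDown(self):
        settings.INSTALLED_APPS = self.old_installed_apps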
@ -1,5 +1,7 @@
|
|||||||
|
from django.conf import settings
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db import connection
|
from django.db import connection, DEFAULT_DB_ALIAS
|
||||||
|
|
||||||
|
|
||||||
class Square(models.Model):
|
class Square(models.Model):
|
||||||
root = models.IntegerField()
|
root = models.IntegerField()
|
||||||
@ -8,6 +10,7 @@ class Square(models.Model):
|
|||||||
def __unicode__(self):
|
def __unicode__(self):
|
||||||
return "%s ** 2 == %s" % (self.root, self.square)
|
return "%s ** 2 == %s" % (self.root, self.square)
|
||||||
|
|
||||||
|
|
||||||
class Person(models.Model):
|
class Person(models.Model):
|
||||||
first_name = models.CharField(max_length=20)
|
first_name = models.CharField(max_length=20)
|
||||||
last_name = models.CharField(max_length=20)
|
last_name = models.CharField(max_length=20)
|
||||||
@ -15,11 +18,25 @@ class Person(models.Model):
|
|||||||
def __unicode__(self):
|
def __unicode__(self):
|
||||||
return u'%s %s' % (self.first_name, self.last_name)
|
return u'%s %s' % (self.first_name, self.last_name)
|
||||||
|
|
||||||
|
|
||||||
class SchoolClass(models.Model):
|
class SchoolClass(models.Model):
|
||||||
year = models.PositiveIntegerField()
|
year = models.PositiveIntegerField()
|
||||||
day = models.CharField(max_length=9, blank=True)
|
day = models.CharField(max_length=9, blank=True)
|
||||||
last_updated = models.DateTimeField()
|
last_updated = models.DateTimeField()
|
||||||
|
|
||||||
|
# Unfortunately, the following model breaks MySQL hard.
|
||||||
|
# Until #13711 is fixed, this test can't be run under MySQL.
|
||||||
|
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
|
||||||
|
class VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ(models.Model):
|
||||||
|
class Meta:
|
||||||
|
# We need to use a short actual table name or
|
||||||
|
# we hit issue #8548 which we're not testing!
|
||||||
|
verbose_name = 'model_with_long_table_name'
|
||||||
|
primary_key_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.AutoField(primary_key=True)
|
||||||
|
charfield_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.CharField(max_length=100)
|
||||||
|
m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.ManyToManyField(Person,blank=True)
|
||||||
|
|
||||||
|
|
||||||
qn = connection.ops.quote_name
|
qn = connection.ops.quote_name
|
||||||
|
|
||||||
__test__ = {'API_TESTS': """
|
__test__ = {'API_TESTS': """
|
||||||
|
@ -1,13 +1,17 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Unit and doctests for specific database backends.
|
# Unit and doctests for specific database backends.
|
||||||
import datetime
|
import datetime
|
||||||
import models
|
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core import management
|
||||||
|
from django.core.management.color import no_style
|
||||||
from django.db import backend, connection, DEFAULT_DB_ALIAS
|
from django.db import backend, connection, DEFAULT_DB_ALIAS
|
||||||
from django.db.backends.signals import connection_created
|
from django.db.backends.signals import connection_created
|
||||||
from django.conf import settings
|
|
||||||
from django.test import TestCase
|
from django.test import TestCase
|
||||||
|
|
||||||
|
from regressiontests.backends import models
|
||||||
|
|
||||||
class Callproc(unittest.TestCase):
|
class Callproc(unittest.TestCase):
|
||||||
|
|
||||||
def test_dbms_session(self):
|
def test_dbms_session(self):
|
||||||
@ -76,6 +80,7 @@ class DateQuotingTest(TestCase):
|
|||||||
classes = models.SchoolClass.objects.filter(last_updated__day=20)
|
classes = models.SchoolClass.objects.filter(last_updated__day=20)
|
||||||
self.assertEqual(len(classes), 1)
|
self.assertEqual(len(classes), 1)
|
||||||
|
|
||||||
|
|
||||||
class ParameterHandlingTest(TestCase):
|
class ParameterHandlingTest(TestCase):
|
||||||
def test_bad_parameter_count(self):
|
def test_bad_parameter_count(self):
|
||||||
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
|
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
|
||||||
@ -88,6 +93,50 @@ class ParameterHandlingTest(TestCase):
|
|||||||
self.assertRaises(Exception, cursor.executemany, query, [(1,2,3),])
|
self.assertRaises(Exception, cursor.executemany, query, [(1,2,3),])
|
||||||
self.assertRaises(Exception, cursor.executemany, query, [(1,),])
|
self.assertRaises(Exception, cursor.executemany, query, [(1,),])
|
||||||
|
|
||||||
|
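ParameterHandlingTest above checks that executemany rejects parameter tuples whose length doesn't match the placeholders. For contrast, a hedged sketch of a well-formed call (the backends_square table name is the conventional one for the Square model and is assumed here):

from django.db import connection

def insert_squares(pairs):
    # pairs is a sequence of (root, square) tuples; each tuple must supply
    # exactly one value per %s placeholder.
    cursor = connection.cursor()
    cursor.executemany(
        'INSERT INTO backends_square (root, square) VALUES (%s, %s)',
        pairs)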
# Unfortunately, the following tests would be good to run on all
|
||||||
|
# backends, but they break MySQL hard. Until #13711 is fixed, they can't be run
|
||||||
|
# everywhere (although they would be an effective test of #13711).
|
||||||
|
if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
|
||||||
|
class LongNameTest(TestCase):
|
||||||
|
"""Long primary keys and model names can result in a sequence name
|
||||||
|
that exceeds the database limits, which will result in truncation
|
||||||
|
on certain databases (e.g., Postgres). The backend needs to use
|
||||||
|
the correct sequence name in last_insert_id and other places, so
|
||||||
|
check it is. Refs #8901.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_sequence_name_length_limits_create(self):
|
||||||
|
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
|
||||||
|
models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
|
||||||
|
|
||||||
|
def test_sequence_name_length_limits_m2m(self):
|
||||||
|
"""Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
|
||||||
|
obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
|
||||||
|
rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
|
||||||
|
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
|
||||||
|
|
||||||
|
def test_sequence_name_length_limits_flush(self):
|
||||||
|
"""Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
|
||||||
|
# A full flush is expensive for the whole test run, so we dig into the
|
||||||
|
# internals to generate the likely offending SQL and run it manually
|
||||||
|
|
||||||
|
# Some convenience aliases
|
||||||
|
VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
|
||||||
|
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
|
||||||
|
tables = [
|
||||||
|
VLM._meta.db_table,
|
||||||
|
VLM_m2m._meta.db_table,
|
||||||
|
]
|
||||||
|
sequences = [
|
||||||
|
{
|
||||||
|
'column': VLM._meta.pk.column,
|
||||||
|
'table': VLM._meta.db_table
|
||||||
|
},
|
||||||
|
]
|
||||||
|
cursor = connection.cursor()
|
||||||
|
for statement in connection.ops.sql_flush(no_style(), tables, sequences):
|
||||||
|
cursor.execute(statement)
|
||||||
|
|
||||||
|
|
||||||
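The flush test above builds the sql_flush statements by hand rather than running a full flush. A hedged generalisation of that helper for an arbitrary model with an auto-incrementing primary key:

from django.core.management.color import no_style
from django.db import connection

def flush_model_table(model):
    # Generate the backend-specific SQL that empties the model's table and
    # resets its primary-key sequence, then run it statement by statement.
    tables = [model._meta.db_table]
    sequences = [{'table': model._meta.db_table, 'column': model._meta.pk.column}]
    cursor = connection.cursor()
    for statement in connection.ops.sql_flush(no_style(), tables, sequences):
        cursor.execute(statement)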
def connection_created_test(sender, **kwargs):
|
def connection_created_test(sender, **kwargs):
|
||||||
print 'connection_created signal'
|
print 'connection_created signal'
|
||||||
|
@ -1,476 +1,228 @@
|
|||||||
"""
|
import copy
|
||||||
###################
|
import pickle
|
||||||
# Empty QueryDict #
|
import unittest
|
||||||
###################
|
from django.http import QueryDict, HttpResponse, CompatCookie, BadHeaderError
|
||||||
|
|
||||||
>>> q = QueryDict('')
|
class QueryDictTests(unittest.TestCase):
|
||||||
|
def test_missing_key(self):
|
||||||
>>> q['foo']
|
q = QueryDict('')
|
||||||
Traceback (most recent call last):
|
self.assertRaises(KeyError, q.__getitem__, 'foo')
|
||||||
...
|
|
||||||
MultiValueDictKeyError: "Key 'foo' not found in <QueryDict: {}>"
|
def test_immutability(self):
|
||||||
|
q = QueryDict('')
|
||||||
>>> q['something'] = 'bar'
|
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
|
||||||
Traceback (most recent call last):
|
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
|
||||||
...
|
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
|
||||||
|
self.assertRaises(AttributeError, q.pop, 'foo')
|
||||||
>>> q.get('foo', 'default')
|
self.assertRaises(AttributeError, q.popitem)
|
||||||
'default'
|
self.assertRaises(AttributeError, q.clear)
|
||||||
|
|
||||||
>>> q.getlist('foo')
|
def test_immutable_get_with_default(self):
|
||||||
[]
|
q = QueryDict('')
|
||||||
|
self.assertEqual(q.get('foo', 'default'), 'default')
|
||||||
>>> q.setlist('foo', ['bar', 'baz'])
|
|
||||||
Traceback (most recent call last):
|
def test_immutable_basic_operations(self):
|
||||||
...
|
q = QueryDict('')
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertEqual(q.getlist('foo'), [])
|
||||||
|
self.assertEqual(q.has_key('foo'), False)
|
||||||
>>> q.appendlist('foo', ['bar'])
|
self.assertEqual('foo' in q, False)
|
||||||
Traceback (most recent call last):
|
self.assertEqual(q.items(), [])
|
||||||
...
|
self.assertEqual(q.lists(), [])
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertEqual(q.items(), [])
|
||||||
|
self.assertEqual(q.keys(), [])
|
||||||
>>> q.has_key('foo')
|
self.assertEqual(q.values(), [])
|
||||||
False
|
self.assertEqual(len(q), 0)
|
||||||
|
self.assertEqual(q.urlencode(), '')
|
||||||
>>> 'foo' in q
|
|
||||||
False
|
def test_single_key_value(self):
|
||||||
|
"""Test QueryDict with one key/value pair"""
|
||||||
>>> q.items()
|
|
||||||
[]
|
q = QueryDict('foo=bar')
|
||||||
|
self.assertEqual(q['foo'], 'bar')
|
||||||
>>> q.lists()
|
self.assertRaises(KeyError, q.__getitem__, 'bar')
|
||||||
[]
|
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
|
||||||
|
|
||||||
>>> q.keys()
|
self.assertEqual(q.get('foo', 'default'), 'bar')
|
||||||
[]
|
self.assertEqual(q.get('bar', 'default'), 'default')
|
||||||
|
self.assertEqual(q.getlist('foo'), ['bar'])
|
||||||
>>> q.values()
|
self.assertEqual(q.getlist('bar'), [])
|
||||||
[]
|
|
||||||
|
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
|
||||||
>>> len(q)
|
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
|
||||||
0
|
|
||||||
|
self.failUnless(q.has_key('foo'))
|
||||||
>>> q.update({'foo': 'bar'})
|
self.failUnless('foo' in q)
|
||||||
Traceback (most recent call last):
|
self.failIf(q.has_key('bar'))
|
||||||
...
|
self.failIf('bar' in q)
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
self.assertEqual(q.items(), [(u'foo', u'bar')])
|
||||||
>>> q.pop('foo')
|
self.assertEqual(q.lists(), [(u'foo', [u'bar'])])
|
||||||
Traceback (most recent call last):
|
self.assertEqual(q.keys(), ['foo'])
|
||||||
...
|
self.assertEqual(q.values(), ['bar'])
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertEqual(len(q), 1)
|
||||||
|
|
||||||
>>> q.popitem()
|
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
|
||||||
Traceback (most recent call last):
|
self.assertRaises(AttributeError, q.pop, 'foo')
|
||||||
...
|
self.assertRaises(AttributeError, q.popitem)
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertRaises(AttributeError, q.clear)
|
||||||
|
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
|
||||||
>>> q.clear()
|
|
||||||
Traceback (most recent call last):
|
self.assertEqual(q.urlencode(), 'foo=bar')
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
def test_mutable_copy(self):
|
||||||
|
"""A copy of a QueryDict is mutable."""
|
||||||
>>> q.setdefault('foo', 'bar')
|
q = QueryDict('').copy()
|
||||||
Traceback (most recent call last):
|
self.assertRaises(KeyError, q.__getitem__, "foo")
|
||||||
...
|
q['name'] = 'john'
|
||||||
AttributeError: This QueryDict instance is immutable
|
self.assertEqual(q['name'], 'john')
|
||||||
|
|
||||||
>>> q.urlencode()
|
def test_mutable_delete(self):
|
||||||
''
|
q = QueryDict('').copy()
|
||||||
|
q['name'] = 'john'
|
||||||
###################################
|
del q['name']
|
||||||
# Mutable copy of empty QueryDict #
|
self.failIf('name' in q)
|
||||||
###################################
|
|
||||||
|
def test_basic_mutable_operations(self):
|
||||||
>>> q = q.copy()
|
q = QueryDict('').copy()
|
||||||
|
q['name'] = 'john'
|
||||||
>>> q['foo']
|
self.assertEqual(q.get('foo', 'default'), 'default')
|
||||||
Traceback (most recent call last):
|
self.assertEqual(q.get('name', 'default'), 'john')
|
||||||
...
|
self.assertEqual(q.getlist('name'), ['john'])
|
||||||
MultiValueDictKeyError: "Key 'foo' not found in <QueryDict: {}>"
|
self.assertEqual(q.getlist('foo'), [])
|
||||||
|
|
||||||
>>> q['name'] = 'john'
|
q.setlist('foo', ['bar', 'baz'])
|
||||||
|
self.assertEqual(q.get('foo', 'default'), 'baz')
|
||||||
>>> q['name']
|
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
|
||||||
u'john'
|
|
||||||
|
q.appendlist('foo', 'another')
|
||||||
>>> del q['name']
|
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
|
||||||
>>> 'name' in q
|
self.assertEqual(q['foo'], 'another')
|
||||||
False
|
self.failUnless(q.has_key('foo'))
|
||||||
|
self.failUnless('foo' in q)
|
||||||
>>> q['name'] = 'john'
|
|
||||||
|
self.assertEqual(q.items(), [(u'foo', u'another'), (u'name', u'john')])
|
||||||
>>> q.get('foo', 'default')
|
self.assertEqual(q.lists(), [(u'foo', [u'bar', u'baz', u'another']), (u'name', [u'john'])])
|
||||||
'default'
|
self.assertEqual(q.keys(), [u'foo', u'name'])
|
||||||
|
self.assertEqual(q.values(), [u'another', u'john'])
|
||||||
>>> q.get('name', 'default')
|
self.assertEqual(len(q), 2)
|
||||||
u'john'
|
|
||||||
|
q.update({'foo': 'hello'})
|
||||||
>>> q.getlist('name')
|
self.assertEqual(q['foo'], 'hello')
|
||||||
[u'john']
|
self.assertEqual(q.get('foo', 'not available'), 'hello')
|
||||||
|
self.assertEqual(q.getlist('foo'), [u'bar', u'baz', u'another', u'hello'])
|
||||||
>>> q.getlist('foo')
|
self.assertEqual(q.pop('foo'), [u'bar', u'baz', u'another', u'hello'])
|
||||||
[]
|
self.assertEqual(q.pop('foo', 'not there'), 'not there')
|
||||||
|
self.assertEqual(q.get('foo', 'not there'), 'not there')
|
||||||
>>> q.setlist('foo', ['bar', 'baz'])
|
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
|
||||||
|
self.assertEqual(q['foo'], 'bar')
|
||||||
>>> q.get('foo', 'default')
|
self.assertEqual(q.getlist('foo'), ['bar'])
|
||||||
u'baz'
|
self.assertEqual(q.urlencode(), 'foo=bar&name=john')
|
||||||
|
|
||||||
>>> q.getlist('foo')
|
q.clear()
|
||||||
[u'bar', u'baz']
|
self.assertEqual(len(q), 0)
|
||||||
|
|
||||||
>>> q.appendlist('foo', 'another')
|
def test_multiple_keys(self):
|
||||||
|
"""Test QueryDict with two key/value pairs with same keys."""
|
||||||
>>> q.getlist('foo')
|
|
||||||
[u'bar', u'baz', u'another']
|
q = QueryDict('vote=yes&vote=no')
|
||||||
|
|
||||||
>>> q['foo']
|
self.assertEqual(q['vote'], u'no')
|
||||||
u'another'
|
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
|
||||||
|
|
||||||
>>> q.has_key('foo')
|
self.assertEqual(q.get('vote', 'default'), u'no')
|
||||||
True
|
self.assertEqual(q.get('foo', 'default'), 'default')
|
||||||
|
self.assertEqual(q.getlist('vote'), [u'yes', u'no'])
|
||||||
>>> 'foo' in q
|
self.assertEqual(q.getlist('foo'), [])
|
||||||
True
|
|
||||||
|
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
|
||||||
>>> q.items()
|
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
|
||||||
[(u'foo', u'another'), (u'name', u'john')]
|
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
|
||||||
|
|
||||||
>>> q.lists()
|
self.assertEqual(q.has_key('vote'), True)
|
||||||
[(u'foo', [u'bar', u'baz', u'another']), (u'name', [u'john'])]
|
self.assertEqual('vote' in q, True)
|
||||||
|
self.assertEqual(q.has_key('foo'), False)
|
||||||
>>> q.keys()
|
self.assertEqual('foo' in q, False)
|
||||||
[u'foo', u'name']
|
self.assertEqual(q.items(), [(u'vote', u'no')])
|
||||||
|
self.assertEqual(q.lists(), [(u'vote', [u'yes', u'no'])])
|
||||||
>>> q.values()
|
self.assertEqual(q.keys(), [u'vote'])
|
||||||
[u'another', u'john']
|
self.assertEqual(q.values(), [u'no'])
|
||||||
|
self.assertEqual(len(q), 1)
|
||||||
>>> len(q)
|
|
||||||
2
|
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
|
||||||
|
self.assertRaises(AttributeError, q.pop, 'foo')
|
||||||
>>> q.update({'foo': 'hello'})
|
self.assertRaises(AttributeError, q.popitem)
|
||||||
|
self.assertRaises(AttributeError, q.clear)
|
||||||
# Displays last value
|
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
|
||||||
>>> q['foo']
|
self.assertRaises(AttributeError, q.__delitem__, 'vote')
|
||||||
u'hello'
|
|
||||||
|
def test_invalid_input_encoding(self):
|
||||||
>>> q.get('foo', 'not available')
|
"""
|
||||||
u'hello'
|
QueryDicts must be able to handle invalid input encoding (in this
|
||||||
|
case, bad UTF-8 encoding).
|
||||||
>>> q.getlist('foo')
|
"""
|
||||||
[u'bar', u'baz', u'another', u'hello']
|
q = QueryDict('foo=bar&foo=\xff')
|
||||||
|
self.assertEqual(q['foo'], u'\ufffd')
|
||||||
>>> q.pop('foo')
|
self.assertEqual(q.getlist('foo'), [u'bar', u'\ufffd'])
|
||||||
[u'bar', u'baz', u'another', u'hello']
|
|
||||||
|
def test_pickle(self):
|
||||||
>>> q.pop('foo', 'not there')
|
q = QueryDict('')
|
||||||
'not there'
|
q1 = pickle.loads(pickle.dumps(q, 2))
|
||||||
|
self.assertEqual(q == q1, True)
|
||||||
>>> q.get('foo', 'not there')
|
q = QueryDict('a=b&c=d')
|
||||||
'not there'
|
q1 = pickle.loads(pickle.dumps(q, 2))
|
||||||
|
self.assertEqual(q == q1, True)
|
||||||
>>> q.setdefault('foo', 'bar')
|
q = QueryDict('a=b&c=d&a=1')
|
||||||
u'bar'
|
q1 = pickle.loads(pickle.dumps(q, 2))
|
||||||
|
self.assertEqual(q == q1, True)
|
||||||
>>> q['foo']
|
|
||||||
u'bar'
|
def test_update_from_querydict(self):
|
||||||
|
"""Regression test for #8278: QueryDict.update(QueryDict)"""
|
||||||
>>> q.getlist('foo')
|
x = QueryDict("a=1&a=2", mutable=True)
|
||||||
[u'bar']
|
y = QueryDict("a=3&a=4")
|
||||||
|
x.update(y)
|
||||||
>>> q.urlencode()
|
self.assertEqual(x.getlist('a'), [u'1', u'2', u'3', u'4'])
|
||||||
'foo=bar&name=john'
|
|
||||||
|
def test_non_default_encoding(self):
|
||||||
>>> q.clear()
|
"""#13572 - QueryDict with a non-default encoding"""
|
||||||
|
q = QueryDict('sbb=one', encoding='rot_13')
|
||||||
>>> len(q)
|
self.assertEqual(q.encoding, 'rot_13')
|
||||||
0
|
self.assertEqual(q.items(), [(u'foo', u'bar')])
|
||||||
|
self.assertEqual(q.urlencode(), 'sbb=one')
|
||||||
#####################################
|
q = q.copy()
|
||||||
# QueryDict with one key/value pair #
|
self.assertEqual(q.encoding, 'rot_13')
|
||||||
#####################################
|
self.assertEqual(q.items(), [(u'foo', u'bar')])
|
||||||
|
self.assertEqual(q.urlencode(), 'sbb=one')
|
||||||
>>> q = QueryDict('foo=bar')
|
self.assertEqual(copy.copy(q).encoding, 'rot_13')
|
||||||
|
self.assertEqual(copy.deepcopy(q).encoding, 'rot_13')
|
||||||
>>> q['foo']
|
|
||||||
u'bar'
|
class HttpResponseTests(unittest.TestCase):
|
||||||
|
def test_unicode_headers(self):
|
||||||
>>> q['bar']
|
r = HttpResponse()
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
# If we insert a unicode value it will be converted to an ascii string.
|
||||||
MultiValueDictKeyError: "Key 'bar' not found in <QueryDict: {u'foo': [u'bar']}>"
|
r['value'] = u'test value'
|
||||||
|
self.failUnless(isinstance(r['value'], str))
|
||||||
>>> q['something'] = 'bar'
|
|
||||||
Traceback (most recent call last):
|
# An error is raised when a unicode object with non-ASCII characters is assigned.
|
||||||
...
|
self.assertRaises(UnicodeEncodeError, r.__setitem__, 'value', u't\xebst value')
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
# The response also converts unicode keys to strings.
|
||||||
>>> q.get('foo', 'default')
|
r[u'test'] = 'testing key'
|
||||||
u'bar'
|
l = list(r.items())
|
||||||
|
l.sort()
|
||||||
>>> q.get('bar', 'default')
|
self.assertEqual(l[1], ('test', 'testing key'))
|
||||||
'default'
|
|
||||||
|
# It will also raise errors for keys with non-ascii data.
|
||||||
>>> q.getlist('foo')
|
self.assertRaises(UnicodeEncodeError, r.__setitem__, u't\xebst key', 'value')
|
||||||
[u'bar']
|
|
||||||
|
def test_newlines_in_headers(self):
|
||||||
>>> q.getlist('bar')
|
# Bug #10188: Do not allow newlines in headers (CR or LF)
|
||||||
[]
|
r = HttpResponse()
|
||||||
|
self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
|
||||||
>>> q.setlist('foo', ['bar', 'baz'])
|
self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
class CookieTests(unittest.TestCase):
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.appendlist('foo', ['bar'])
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.has_key('foo')
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> 'foo' in q
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> q.has_key('bar')
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> 'bar' in q
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> q.items()
|
|
||||||
[(u'foo', u'bar')]
|
|
||||||
|
|
||||||
>>> q.lists()
|
|
||||||
[(u'foo', [u'bar'])]
|
|
||||||
|
|
||||||
>>> q.keys()
|
|
||||||
[u'foo']
|
|
||||||
|
|
||||||
>>> q.values()
|
|
||||||
[u'bar']
|
|
||||||
|
|
||||||
>>> len(q)
|
|
||||||
1
|
|
||||||
|
|
||||||
>>> q.update({'foo': 'bar'})
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.pop('foo')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.popitem()
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.clear()
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.setdefault('foo', 'bar')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.urlencode()
|
|
||||||
'foo=bar'
|
|
||||||
|
|
||||||
#####################################################
|
|
||||||
# QueryDict with two key/value pairs with same keys #
|
|
||||||
#####################################################
|
|
||||||
|
|
||||||
>>> q = QueryDict('vote=yes&vote=no')
|
|
||||||
|
|
||||||
>>> q['vote']
|
|
||||||
u'no'
|
|
||||||
|
|
||||||
>>> q['something'] = 'bar'
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.get('vote', 'default')
|
|
||||||
u'no'
|
|
||||||
|
|
||||||
>>> q.get('foo', 'default')
|
|
||||||
'default'
|
|
||||||
|
|
||||||
>>> q.getlist('vote')
|
|
||||||
[u'yes', u'no']
|
|
||||||
|
|
||||||
>>> q.getlist('foo')
|
|
||||||
[]
|
|
||||||
|
|
||||||
>>> q.setlist('foo', ['bar', 'baz'])
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.appendlist('foo', ['bar'])
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.has_key('vote')
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> 'vote' in q
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> q.has_key('foo')
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> 'foo' in q
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> q.items()
|
|
||||||
[(u'vote', u'no')]
|
|
||||||
|
|
||||||
>>> q.lists()
|
|
||||||
[(u'vote', [u'yes', u'no'])]
|
|
||||||
|
|
||||||
>>> q.keys()
|
|
||||||
[u'vote']
|
|
||||||
|
|
||||||
>>> q.values()
|
|
||||||
[u'no']
|
|
||||||
|
|
||||||
>>> len(q)
|
|
||||||
1
|
|
||||||
|
|
||||||
>>> q.update({'foo': 'bar'})
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.pop('foo')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.popitem()
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.clear()
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.setdefault('foo', 'bar')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
>>> q.urlencode()
|
|
||||||
'vote=yes&vote=no'
|
|
||||||
|
|
||||||
>>> del q['vote']
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: This QueryDict instance is immutable
|
|
||||||
|
|
||||||
# QueryDicts must be able to handle invalid input encoding (in this case, bad
|
|
||||||
# UTF-8 encoding).
|
|
||||||
>>> q = QueryDict('foo=bar&foo=\xff')
|
|
||||||
|
|
||||||
>>> q['foo']
|
|
||||||
u'\ufffd'
|
|
||||||
|
|
||||||
>>> q.getlist('foo')
|
|
||||||
[u'bar', u'\ufffd']
|
|
||||||
|
|
||||||
|
|
||||||
########################
|
|
||||||
# Pickling a QueryDict #
|
|
||||||
########################
|
|
||||||
>>> import pickle
|
|
||||||
>>> q = QueryDict('')
|
|
||||||
>>> q1 = pickle.loads(pickle.dumps(q, 2))
|
|
||||||
>>> q == q1
|
|
||||||
True
|
|
||||||
>>> q = QueryDict('a=b&c=d')
|
|
||||||
>>> q1 = pickle.loads(pickle.dumps(q, 2))
|
|
||||||
>>> q == q1
|
|
||||||
True
|
|
||||||
>>> q = QueryDict('a=b&c=d&a=1')
|
|
||||||
>>> q1 = pickle.loads(pickle.dumps(q, 2))
|
|
||||||
>>> q == q1
|
|
||||||
True
|
|
||||||
|
|
||||||
######################################
|
|
||||||
# HttpResponse with Unicode headers #
|
|
||||||
######################################
|
|
||||||
|
|
||||||
>>> r = HttpResponse()
|
|
||||||
|
|
||||||
If we insert a unicode value it will be converted to an ascii
|
|
||||||
string. This makes sure we comply with the HTTP specifications.
|
|
||||||
|
|
||||||
>>> r['value'] = u'test value'
|
|
||||||
>>> isinstance(r['value'], str)
|
|
||||||
True
|
|
||||||
|
|
||||||
An error is raised when a unicode object with non-ASCII characters is assigned.
|
|
||||||
|
|
||||||
>>> r['value'] = u't\xebst value' # doctest:+ELLIPSIS
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
UnicodeEncodeError: ..., HTTP response headers must be in US-ASCII format
|
|
||||||
|
|
||||||
The response also converts unicode keys to strings.
|
|
||||||
|
|
||||||
>>> r[u'test'] = 'testing key'
|
|
||||||
>>> l = list(r.items())
|
|
||||||
>>> l.sort()
|
|
||||||
>>> l[1]
|
|
||||||
('test', 'testing key')
|
|
||||||
|
|
||||||
It will also raise errors for keys with non-ascii data.
|
|
||||||
|
|
||||||
>>> r[u't\xebst'] = 'testing key' # doctest:+ELLIPSIS
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
UnicodeEncodeError: ..., HTTP response headers must be in US-ASCII format
|
|
||||||
|
|
||||||
# Bug #10188: Do not allow newlines in headers (CR or LF)
|
|
||||||
>>> r['test\\rstr'] = 'test'
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
BadHeaderError: Header values can't contain newlines (got 'test\\rstr')
|
|
||||||
|
|
||||||
>>> r['test\\nstr'] = 'test'
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
BadHeaderError: Header values can't contain newlines (got 'test\\nstr')
|
|
||||||
|
|
||||||
#
|
|
||||||
# Regression test for #8278: QueryDict.update(QueryDict)
|
|
||||||
#
|
|
||||||
>>> x = QueryDict("a=1&a=2", mutable=True)
|
|
||||||
>>> y = QueryDict("a=3&a=4")
|
|
||||||
>>> x.update(y)
|
|
||||||
>>> x.getlist('a')
|
|
||||||
[u'1', u'2', u'3', u'4']
|
|
||||||
"""
|
|
||||||
|
|
||||||
from django.http import QueryDict, HttpResponse, CompatCookie
|
|
||||||
from django.test import TestCase
|
|
||||||
|
|
||||||
|
|
||||||
class Cookies(TestCase):
|
|
||||||
|
|
||||||
def test_encode(self):
|
def test_encode(self):
|
||||||
"""
|
"""
|
||||||
Test that we don't output tricky characters in encoded value
|
Test that we don't output tricky characters in encoded value
|
||||||
@ -502,7 +254,3 @@ class Cookies(TestCase):
|
|||||||
c2 = CompatCookie()
|
c2 = CompatCookie()
|
||||||
c2.load(c.output())
|
c2.load(c.output())
|
||||||
self.assertEqual(c['test'].value, c2['test'].value)
|
self.assertEqual(c['test'].value, c2['test'].value)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import doctest
|
|
||||||
doctest.testmod()
|
|
||||||
|
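Taken together, the converted tests pin down QueryDict's contract: instances parsed from a query string are immutable, indexing a repeated key returns its last value, and only a copy() may be modified. A hedged sketch of that behaviour in ordinary code:

from django.http import QueryDict

q = QueryDict('vote=yes&vote=no')
assert q['vote'] == u'no'                    # last value wins for repeated keys
assert q.getlist('vote') == [u'yes', u'no']

mutable = q.copy()                           # copies are mutable
mutable.appendlist('vote', 'maybe')
mutable['voter'] = 'alice'
assert mutable.getlist('vote') == [u'yes', u'no', u'maybe']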
@ -1,8 +1,10 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from django.contrib.auth.models import User
|
from django.contrib.auth.models import User
|
||||||
from django.core import management
|
from django.core import management
|
||||||
from django.db import models
|
from django.db import models
|
||||||
|
|
||||||
|
|
||||||
# Forward declared intermediate model
|
# Forward declared intermediate model
|
||||||
class Membership(models.Model):
|
class Membership(models.Model):
|
||||||
person = models.ForeignKey('Person')
|
person = models.ForeignKey('Person')
|
||||||
@ -51,159 +53,3 @@ class Through(ThroughBase):
|
|||||||
class B(models.Model):
|
class B(models.Model):
|
||||||
b_text = models.CharField(max_length=20)
|
b_text = models.CharField(max_length=20)
|
||||||
a_list = models.ManyToManyField(A, through=Through)
|
a_list = models.ManyToManyField(A, through=Through)
|
||||||
|
|
||||||
|
|
||||||
__test__ = {'API_TESTS':"""
|
|
||||||
# Create some dummy data
|
|
||||||
>>> bob = Person.objects.create(name='Bob')
|
|
||||||
>>> jim = Person.objects.create(name='Jim')
|
|
||||||
|
|
||||||
>>> rock = Group.objects.create(name='Rock')
|
|
||||||
>>> roll = Group.objects.create(name='Roll')
|
|
||||||
|
|
||||||
>>> frank = User.objects.create_user('frank','frank@example.com','password')
|
|
||||||
>>> jane = User.objects.create_user('jane','jane@example.com','password')
|
|
||||||
|
|
||||||
# Now test that the forward declared Membership works
|
|
||||||
>>> Membership.objects.create(person=bob, group=rock)
|
|
||||||
<Membership: Bob is a member of Rock>
|
|
||||||
|
|
||||||
>>> Membership.objects.create(person=bob, group=roll)
|
|
||||||
<Membership: Bob is a member of Roll>
|
|
||||||
|
|
||||||
>>> Membership.objects.create(person=jim, group=rock)
|
|
||||||
<Membership: Jim is a member of Rock>
|
|
||||||
|
|
||||||
>>> bob.group_set.all()
|
|
||||||
[<Group: Rock>, <Group: Roll>]
|
|
||||||
|
|
||||||
>>> roll.members.all()
|
|
||||||
[<Person: Bob>]
|
|
||||||
|
|
||||||
# Error messages use the model name, not repr of the class name
|
|
||||||
>>> bob.group_set = []
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: Cannot set values on a ManyToManyField which specifies an intermediary model. Use m2m_through_regress.Membership's Manager instead.
|
|
||||||
|
|
||||||
>>> roll.members = []
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: Cannot set values on a ManyToManyField which specifies an intermediary model. Use m2m_through_regress.Membership's Manager instead.
|
|
||||||
|
|
||||||
>>> rock.members.create(name='Anne')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: Cannot use create() on a ManyToManyField which specifies an intermediary model. Use m2m_through_regress.Membership's Manager instead.
|
|
||||||
|
|
||||||
>>> bob.group_set.create(name='Funk')
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
AttributeError: Cannot use create() on a ManyToManyField which specifies an intermediary model. Use m2m_through_regress.Membership's Manager instead.
|
|
||||||
|
|
||||||
# Now test that the intermediate with a relationship outside
|
|
||||||
# the current app (i.e., UserMembership) works
|
|
||||||
>>> UserMembership.objects.create(user=frank, group=rock)
|
|
||||||
<UserMembership: frank is a user and member of Rock>
|
|
||||||
|
|
||||||
>>> UserMembership.objects.create(user=frank, group=roll)
|
|
||||||
<UserMembership: frank is a user and member of Roll>
|
|
||||||
|
|
||||||
>>> UserMembership.objects.create(user=jane, group=rock)
|
|
||||||
<UserMembership: jane is a user and member of Rock>
|
|
||||||
|
|
||||||
>>> frank.group_set.all()
|
|
||||||
[<Group: Rock>, <Group: Roll>]
|
|
||||||
|
|
||||||
>>> roll.user_members.all()
|
|
||||||
[<User: frank>]
|
|
||||||
|
|
||||||
# Regression test for #8134 --
|
|
||||||
# m2m-through models shouldn't be serialized as m2m fields on the model.
|
|
||||||
|
|
||||||
# First, clean up a lot of objects we don't need.
|
|
||||||
# The serialization test only requires three objects to work -
|
|
||||||
# one for each end of the m2m, plus the through model.
|
|
||||||
|
|
||||||
>>> User.objects.all().delete()
|
|
||||||
>>> UserMembership.objects.all().delete()
|
|
||||||
>>> frank.delete()
|
|
||||||
>>> rock.delete()
|
|
||||||
>>> jim.delete()
|
|
||||||
|
|
||||||
# Dump the current contents of the database as a JSON fixture
|
|
||||||
>>> management.call_command('dumpdata', 'm2m_through_regress', format='json', indent=2)
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"pk": 2,
|
|
||||||
"model": "m2m_through_regress.membership",
|
|
||||||
"fields": {
|
|
||||||
"person": 1,
|
|
||||||
"price": 100,
|
|
||||||
"group": 2
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"pk": 1,
|
|
||||||
"model": "m2m_through_regress.person",
|
|
||||||
"fields": {
|
|
||||||
"name": "Bob"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"pk": 2,
|
|
||||||
"model": "m2m_through_regress.group",
|
|
||||||
"fields": {
|
|
||||||
"name": "Roll"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
# Check the XML serializer too, since it doesn't use the common implementation
|
|
||||||
>>> management.call_command('dumpdata', 'm2m_through_regress', format='xml', indent=2)
|
|
||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<django-objects version="1.0">
|
|
||||||
<object pk="2" model="m2m_through_regress.membership">
|
|
||||||
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">1</field>
|
|
||||||
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">2</field>
|
|
||||||
<field type="IntegerField" name="price">100</field>
|
|
||||||
</object>
|
|
||||||
<object pk="1" model="m2m_through_regress.person">
|
|
||||||
<field type="CharField" name="name">Bob</field>
|
|
||||||
</object>
|
|
||||||
<object pk="2" model="m2m_through_regress.group">
|
|
||||||
<field type="CharField" name="name">Roll</field>
|
|
||||||
</object>
|
|
||||||
</django-objects>
|
|
||||||
|
|
||||||
## Regression test for #8046:
|
|
||||||
Check that we don't involve too many copies of the intermediate table when
|
|
||||||
doing a join.
|
|
||||||
|
|
||||||
>>> bob = Person.objects.create(name='Bob')
|
|
||||||
>>> jim = Person.objects.create(name='Jim')
|
|
||||||
>>> rock = Group.objects.create(name='Rock')
|
|
||||||
>>> roll = Group.objects.create(name='Roll')
|
|
||||||
>>> _ = Membership.objects.create(person=bob, group=rock)
|
|
||||||
>>> _ = Membership.objects.create(person=jim, group=rock, price=50)
|
|
||||||
>>> _ = Membership.objects.create(person=bob, group=roll, price=50)
|
|
||||||
>>> rock.members.filter(membership__price=50)
|
|
||||||
[<Person: Jim>]
|
|
||||||
|
|
||||||
## Regression test for #8254
|
|
||||||
>>> bob.group_set.filter(membership__price=50)
|
|
||||||
[<Group: Roll>]
|
|
||||||
|
|
||||||
## Regression test for #9804
|
|
||||||
# Flush the database, just to make sure we can.
|
|
||||||
>>> management.call_command('flush', verbosity=0, interactive=False)
|
|
||||||
|
|
||||||
## Regression test for #11107
|
|
||||||
Ensure that sequences on m2m_through tables are being created for the through
|
|
||||||
model, not for a phantom auto-generated m2m table.
|
|
||||||
|
|
||||||
>>> management.call_command('loaddata', 'm2m_through', verbosity=0)
|
|
||||||
>>> management.call_command('dumpdata', 'm2m_through_regress', format='json')
|
|
||||||
[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]
|
|
||||||
|
|
||||||
"""}
|
|
||||||
|
126
tests/regressiontests/m2m_through_regress/tests.py
Normal file
@ -0,0 +1,126 @@
|
|||||||
|
try:
|
||||||
|
from cStringIO import StringIO
|
||||||
|
except ImportError:
|
||||||
|
from StringIO import StringIO
|
||||||
|
|
||||||
|
from django.core import management
|
||||||
|
from django.contrib.auth.models import User
|
||||||
|
from django.test import TestCase
|
||||||
|
|
||||||
|
from models import Person, Group, Membership, UserMembership
|
||||||
|
|
||||||
|
|
||||||
|
class M2MThroughTestCase(TestCase):
|
||||||
|
def test_everything(self):
|
||||||
|
bob = Person.objects.create(name="Bob")
|
||||||
|
jim = Person.objects.create(name="Jim")
|
||||||
|
|
||||||
|
rock = Group.objects.create(name="Rock")
|
||||||
|
roll = Group.objects.create(name="Roll")
|
||||||
|
|
||||||
|
frank = User.objects.create_user("frank", "frank@example.com", "password")
|
||||||
|
jane = User.objects.create_user("jane", "jane@example.com", "password")
|
||||||
|
|
||||||
|
Membership.objects.create(person=bob, group=rock)
|
||||||
|
Membership.objects.create(person=bob, group=roll)
|
||||||
|
Membership.objects.create(person=jim, group=rock)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
bob.group_set.all(), [
|
||||||
|
"<Group: Rock>",
|
||||||
|
"<Group: Roll>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
roll.members.all(), [
|
||||||
|
"<Person: Bob>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertRaises(AttributeError, setattr, bob, "group_set", [])
|
||||||
|
self.assertRaises(AttributeError, setattr, roll, "members", [])
|
||||||
|
|
||||||
|
self.assertRaises(AttributeError, rock.members.create, name="Anne")
|
||||||
|
self.assertRaises(AttributeError, bob.group_set.create, name="Funk")
|
||||||
|
|
||||||
|
UserMembership.objects.create(user=frank, group=rock)
|
||||||
|
UserMembership.objects.create(user=frank, group=roll)
|
||||||
|
UserMembership.objects.create(user=jane, group=rock)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
frank.group_set.all(), [
|
||||||
|
"<Group: Rock>",
|
||||||
|
"<Group: Roll>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
roll.user_members.all(), [
|
||||||
|
"<User: frank>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_serialization(self):
|
||||||
|
"m2m-through models aren't serialized as m2m fields. Refs #8134"
|
||||||
|
|
||||||
|
p = Person.objects.create(name="Bob")
|
||||||
|
g = Group.objects.create(name="Roll")
|
||||||
|
Membership.objects.create(person=p, group=g)
|
||||||
|
|
||||||
|
out = StringIO()
|
||||||
|
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
|
||||||
|
self.assertEqual(out.getvalue().strip(), """[{"pk": 1, "model": "m2m_through_regress.membership", "fields": {"person": 1, "price": 100, "group": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Bob"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]""")
|
||||||
|
|
||||||
|
out = StringIO()
|
||||||
|
management.call_command("dumpdata", "m2m_through_regress", format="xml",
|
||||||
|
indent=2, stdout=out)
|
||||||
|
self.assertEqual(out.getvalue().strip(), """
|
||||||
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
|
<django-objects version="1.0">
|
||||||
|
<object pk="1" model="m2m_through_regress.membership">
|
||||||
|
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">1</field>
|
||||||
|
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">1</field>
|
||||||
|
<field type="IntegerField" name="price">100</field>
|
||||||
|
</object>
|
||||||
|
<object pk="1" model="m2m_through_regress.person">
|
||||||
|
<field type="CharField" name="name">Bob</field>
|
||||||
|
</object>
|
||||||
|
<object pk="1" model="m2m_through_regress.group">
|
||||||
|
<field type="CharField" name="name">Roll</field>
|
||||||
|
</object>
|
||||||
|
</django-objects>
|
||||||
|
""".strip())
|
||||||
|
|
||||||
|
def test_join_trimming(self):
|
||||||
|
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
|
||||||
|
bob = Person.objects.create(name="Bob")
|
||||||
|
jim = Person.objects.create(name="Jim")
|
||||||
|
|
||||||
|
rock = Group.objects.create(name="Rock")
|
||||||
|
roll = Group.objects.create(name="Roll")
|
||||||
|
|
||||||
|
Membership.objects.create(person=bob, group=rock)
|
||||||
|
Membership.objects.create(person=jim, group=rock, price=50)
|
||||||
|
Membership.objects.create(person=bob, group=roll, price=50)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
rock.members.filter(membership__price=50), [
|
||||||
|
"<Person: Jim>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertQuerysetEqual(
|
||||||
|
bob.group_set.filter(membership__price=50), [
|
||||||
|
"<Group: Roll>",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
class ThroughLoadDataTestCase(TestCase):
|
||||||
|
fixtures = ["m2m_through"]
|
||||||
|
|
||||||
|
def test_sequence_creation(self):
|
||||||
|
"Check that sequences on an m2m_through are created for the through model, not a phantom auto-generated m2m table. Refs #11107"
|
||||||
|
out = StringIO()
|
||||||
|
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
|
||||||
|
self.assertEqual(out.getvalue().strip(), """[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]""")
|
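The new tests.py above drives ManyToManyField(through=...) end to end: memberships must be created through the intermediary model, and group.members.add()/create() raise AttributeError. A hedged minimal sketch of the models those tests assume (names follow this suite's Person/Group/Membership convention):

from django.db import models

class Person(models.Model):
    name = models.CharField(max_length=128)

class Group(models.Model):
    name = models.CharField(max_length=128)
    members = models.ManyToManyField(Person, through='Membership')

class Membership(models.Model):
    person = models.ForeignKey(Person)
    group = models.ForeignKey(Group)
    price = models.IntegerField(default=100)

With a through model in place, relationships are created as Membership rows, which is why the tests call Membership.objects.create(...) rather than group.members.add(...).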
@ -883,7 +883,13 @@ class QueryTestCase(TestCase):
|
|||||||
self.assertRaises(ValueError, str, qs.query)
|
self.assertRaises(ValueError, str, qs.query)
|
||||||
|
|
||||||
# Evaluating the query shouldn't work, either
|
# Evaluating the query shouldn't work, either
|
||||||
self.assertRaises(ValueError, list, qs)
|
try:
|
||||||
|
for obj in qs:
|
||||||
|
pass
|
||||||
|
self.fail('Iterating over query should raise ValueError')
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class TestRouter(object):
|
class TestRouter(object):
|
||||||
# A test router. The behaviour is vaguely master/slave, but the
|
# A test router. The behaviour is vaguely master/slave, but the
|
||||||
@ -1491,19 +1497,10 @@ class AuthTestCase(TestCase):
|
|||||||
self.old_routers = router.routers
|
self.old_routers = router.routers
|
||||||
router.routers = [AuthRouter()]
|
router.routers = [AuthRouter()]
|
||||||
|
|
||||||
# Redirect stdout to a buffer so we can test
|
|
||||||
# the output of a management command
|
|
||||||
self.old_stdout = sys.stdout
|
|
||||||
self.stdout = StringIO()
|
|
||||||
sys.stdout = self.stdout
|
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
# Restore the 'other' database as an independent database
|
# Restore the 'other' database as an independent database
|
||||||
router.routers = self.old_routers
|
router.routers = self.old_routers
|
||||||
|
|
||||||
# Restore stdout
|
|
||||||
sys.stdout = self.old_stdout
|
|
||||||
|
|
||||||
def test_auth_manager(self):
|
def test_auth_manager(self):
|
||||||
"The methods on the auth manager obey database hints"
|
"The methods on the auth manager obey database hints"
|
||||||
# Create one user using default allocation policy
|
# Create one user using default allocation policy
|
||||||
@ -1539,14 +1536,16 @@ class AuthTestCase(TestCase):
 
         # Check that dumping the default database doesn't try to include auth
         # because allow_syncdb prohibits auth on default
-        self.stdout.flush()
-        management.call_command('dumpdata', 'auth', format='json', database='default')
-        self.assertEquals(self.stdout.getvalue(), '[]\n')
+        new_io = StringIO()
+        management.call_command('dumpdata', 'auth', format='json', database='default', stdout=new_io)
+        command_output = new_io.getvalue().strip()
+        self.assertEqual(command_output, '[]')
 
         # Check that dumping the other database does include auth
-        self.stdout.flush()
-        management.call_command('dumpdata', 'auth', format='json', database='other')
-        self.assertTrue('alice@example.com' in self.stdout.getvalue())
+        new_io = StringIO()
+        management.call_command('dumpdata', 'auth', format='json', database='other', stdout=new_io)
+        command_output = new_io.getvalue().strip()
+        self.assertTrue('"email": "alice@example.com",' in command_output)
 
 class UserProfileTestCase(TestCase):
     def setUp(self):
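The two hunks above drop the global sys.stdout patching in favour of the stdout keyword that call_command now passes through to the management command. A minimal sketch of the capture pattern, assuming django.contrib.auth is installed:

from StringIO import StringIO

from django.core import management
from django.test import TestCase


class DumpdataCaptureSketch(TestCase):
    def test_output_goes_to_the_given_stream(self):
        new_io = StringIO()
        # Nothing is written to the real sys.stdout; the command writes to new_io.
        management.call_command('dumpdata', 'auth', format='json', stdout=new_io)
        command_output = new_io.getvalue().strip()
        self.assertTrue(command_output.startswith('[') and command_output.endswith(']'))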
@ -10,14 +10,16 @@ forward, backwards and self references.
 
 import datetime
 import decimal
-import unittest
-from cStringIO import StringIO
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
 
-from django.utils.functional import curry
-from django.core import serializers
-from django.db import transaction, DEFAULT_DB_ALIAS
-from django.core import management
 from django.conf import settings
+from django.core import serializers, management
+from django.db import transaction, DEFAULT_DB_ALIAS
+from django.test import TestCase
+from django.utils.functional import curry
 
 from models import *
 
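The try/except import above is the usual Python 2 idiom: prefer the C-accelerated cStringIO when it is available, and fall back to the pure-Python StringIO module where it is not. A minimal standalone illustration of the fallback:

try:
    from cStringIO import StringIO  # C implementation, absent on some platforms
except ImportError:
    from StringIO import StringIO   # pure-Python fallback with the same interface

buf = StringIO()
buf.write('captured output\n')
assert buf.getvalue() == 'captured output\n'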
@ -59,10 +61,10 @@ def im2m_create(pk, klass, data):
 
 def im_create(pk, klass, data):
     instance = klass(id=pk)
-    setattr(instance, 'right_id', data['right'])
-    setattr(instance, 'left_id', data['left'])
+    instance.right_id = data['right']
+    instance.left_id = data['left']
     if 'extra' in data:
-        setattr(instance, 'extra', data['extra'])
+        instance.extra = data['extra']
     models.Model.save_base(instance, raw=True)
     return [instance]
 
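The change above replaces setattr() with direct attribute assignment, which is equivalent whenever the attribute name is a fixed literal; a tiny illustration with a hypothetical class:

class Instance(object):  # hypothetical stand-in for the model instance
    pass

instance = Instance()
setattr(instance, 'right_id', 1)  # dynamic form
instance.left_id = 2              # direct form; same effect for a literal name
assert (instance.right_id, instance.left_id) == (1, 2)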
@ -96,7 +98,9 @@ def inherited_create(pk, klass, data):
 def data_compare(testcase, pk, klass, data):
     instance = klass.objects.get(id=pk)
     testcase.assertEqual(data, instance.data,
-        "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (pk,data, type(data), instance.data, type(instance.data)))
+        "Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)" % (
+            pk, data, type(data), instance.data, type(instance.data))
+        )
 
 def generic_compare(testcase, pk, klass, data):
     instance = klass.objects.get(id=pk)
@ -348,28 +352,16 @@ if settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] != 'django.db.backends.mysql':
 
 # Dynamically create serializer tests to ensure that all
 # registered serializers are automatically tested.
-class SerializerTests(unittest.TestCase):
+class SerializerTests(TestCase):
     pass
 
 def serializerTest(format, self):
-    # Clear the database first
-    management.call_command('flush', verbosity=0, interactive=False)
-
     # Create all the objects defined in the test data
     objects = []
     instance_count = {}
-    transaction.enter_transaction_management()
-    try:
-        transaction.managed(True)
-        for (func, pk, klass, datum) in test_data:
-            objects.extend(func[0](pk, klass, datum))
-            instance_count[klass] = 0
-        transaction.commit()
-    except:
-        transaction.rollback()
-        transaction.leave_transaction_management()
-        raise
-    transaction.leave_transaction_management()
+    for (func, pk, klass, datum) in test_data:
+        objects.extend(func[0](pk, klass, datum))
 
     # Get a count of the number of objects created for each class
     for klass in instance_count:
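The flush call and the manual transaction bookkeeping removed above appear to be redundant once SerializerTests derives from django.test.TestCase, which (on backends that support transactions) runs each test inside a transaction and rolls it back afterwards. A minimal sketch of that isolation, assuming ComplexModel is the simple three-CharField model used later in this file:

from django.test import TestCase

from models import ComplexModel


class IsolationSketch(TestCase):
    def test_a_create_row(self):
        ComplexModel.objects.create(field1='first', field2='second', field3='third')
        self.assertEqual(ComplexModel.objects.count(), 1)

    def test_b_table_is_clean_again(self):
        # The row created in test_a was rolled back with its transaction,
        # so no explicit 'flush' is needed between tests.
        self.assertEqual(ComplexModel.objects.count(), 0)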
@ -381,19 +373,8 @@ def serializerTest(format, self):
     # Serialize the test database
     serialized_data = serializers.serialize(format, objects, indent=2)
 
-    # Flush the database and recreate from the serialized data
-    management.call_command('flush', verbosity=0, interactive=False)
-    transaction.enter_transaction_management()
-    try:
-        transaction.managed(True)
-        for obj in serializers.deserialize(format, serialized_data):
-            obj.save()
-        transaction.commit()
-    except:
-        transaction.rollback()
-        transaction.leave_transaction_management()
-        raise
-    transaction.leave_transaction_management()
+    for obj in serializers.deserialize(format, serialized_data):
+        obj.save()
 
     # Assert that the deserialized data is the same
     # as the original source
@ -406,10 +387,7 @@ def serializerTest(format, self):
     self.assertEquals(count, klass.objects.count())
 
 def fieldsTest(format, self):
-    # Clear the database first
-    management.call_command('flush', verbosity=0, interactive=False)
-
-    obj = ComplexModel(field1='first',field2='second',field3='third')
+    obj = ComplexModel(field1='first', field2='second', field3='third')
     obj.save_base(raw=True)
 
     # Serialize then deserialize the test database
@ -422,9 +400,6 @@ def fieldsTest(format, self):
     self.assertEqual(result.object.field3, 'third')
 
 def streamTest(format, self):
-    # Clear the database first
-    management.call_command('flush', verbosity=0, interactive=False)
-
     obj = ComplexModel(field1='first',field2='second',field3='third')
     obj.save_base(raw=True)
 
@ -440,7 +415,7 @@ def streamTest(format, self):
     stream.close()
 
 for format in serializers.get_serializer_formats():
-    setattr(SerializerTests, 'test_'+format+'_serializer', curry(serializerTest, format))
-    setattr(SerializerTests, 'test_'+format+'_serializer_fields', curry(fieldsTest, format))
+    setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
+    setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
     if format != 'python':
-        setattr(SerializerTests, 'test_'+format+'_serializer_stream', curry(streamTest, format))
+        setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
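The loop above uses curry from django.utils.functional to bind each serializer format into a generated test method on SerializerTests. A rough equivalent of the pattern using a plain closure and a hypothetical, hard-coded format list:

import unittest

from django.core import serializers


class GeneratedSerializerSketch(unittest.TestCase):
    pass


def make_format_test(fmt):
    # The closure captures fmt; the returned function becomes an ordinary
    # test method once assigned onto the class.
    def test(self):
        self.assertTrue(fmt in serializers.get_serializer_formats())
    return test


for fmt in ('json', 'xml'):
    setattr(GeneratedSerializerSketch, 'test_%s_registered' % fmt, make_format_test(fmt))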
@ -88,11 +88,11 @@ u'0 minutes'
 u'0 minutes'
 
 # Timesince should work with both date objects (#9672)
 >>> today = datetime.date.today()
 >>> timeuntil(today+oneday, today)
 u'1 day'
 >>> timeuntil(today-oneday, today)
 u'0 minutes'
 >>> timeuntil(today+oneweek, today)
 u'1 week'
 """