Mirror of https://github.com/django/django.git (synced 2025-07-04 09:49:12 +00:00)

commit 829fd5a967
parent c349ba4cfc
git-svn-id: http://code.djangoproject.com/svn/django/branches/newforms-admin@7809 bcc190cf-cafb-0310-a4f2-bffc1f526a37
AUTHORS (2 changes)

@@ -95,6 +95,7 @@ answer newbie questions, and generally made Django that much better:
     Sengtha Chay <sengtha@e-khmer.com>
     ivan.chelubeev@gmail.com
     Bryan Chow <bryan at verdjn dot com>
+    Antonis Christofides <anthony@itia.ntua.gr>
     Michal Chruszcz <troll@pld-linux.org>
     Can Burak Çilingir <canburak@cs.bilgi.edu.tr>
     Ian Clelland <clelland@gmail.com>

@@ -195,6 +196,7 @@ answer newbie questions, and generally made Django that much better:
     jcrasta@gmail.com
     jdetaeye
     Zak Johnson <zakj@nox.cx>
+    Nis Jørgensen <nis@superlativ.dk>
     Michael Josephson <http://www.sdjournal.com/>
     jpellerin@gmail.com
     junzhang.jn@gmail.com

Binary file not shown.

File diff suppressed because it is too large.
@@ -8,6 +8,7 @@ from django.contrib import admin
 
 class GroupAdmin(admin.ModelAdmin):
     search_fields = ('name',)
+    ordering = ('name',)
     filter_horizontal = ('permissions',)
 
 class UserAdmin(admin.ModelAdmin):

@@ -21,6 +22,7 @@ class UserAdmin(admin.ModelAdmin):
     list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff')
     list_filter = ('is_staff', 'is_superuser')
     search_fields = ('username', 'first_name', 'last_name', 'email')
+    ordering = ('username',)
     filter_horizontal = ('user_permissions',)
 
     def add_view(self, request):

django/contrib/auth/fixtures/authtestdata.json (new file, 56 lines)

@@ -0,0 +1,56 @@
+[
+    {
+        "pk": "1",
+        "model": "auth.user",
+        "fields": {
+            "username": "testclient",
+            "first_name": "Test",
+            "last_name": "Client",
+            "is_active": true,
+            "is_superuser": false,
+            "is_staff": false,
+            "last_login": "2006-12-17 07:03:31",
+            "groups": [],
+            "user_permissions": [],
+            "password": "sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161",
+            "email": "testclient@example.com",
+            "date_joined": "2006-12-17 07:03:31"
+        }
+    },
+    {
+        "pk": "2",
+        "model": "auth.user",
+        "fields": {
+            "username": "inactive",
+            "first_name": "Inactive",
+            "last_name": "User",
+            "is_active": false,
+            "is_superuser": false,
+            "is_staff": false,
+            "last_login": "2006-12-17 07:03:31",
+            "groups": [],
+            "user_permissions": [],
+            "password": "sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161",
+            "email": "testclient@example.com",
+            "date_joined": "2006-12-17 07:03:31"
+        }
+    },
+    {
+        "pk": "3",
+        "model": "auth.user",
+        "fields": {
+            "username": "staff",
+            "first_name": "Staff",
+            "last_name": "Member",
+            "is_active": true,
+            "is_superuser": false,
+            "is_staff": true,
+            "last_login": "2006-12-17 07:03:31",
+            "groups": [],
+            "user_permissions": [],
+            "password": "sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161",
+            "email": "staffmember@example.com",
+            "date_joined": "2006-12-17 07:03:31"
+        }
+    }
+]
@@ -96,7 +96,6 @@ class Group(models.Model):
     class Meta:
         verbose_name = _('group')
         verbose_name_plural = _('groups')
-        ordering = ('name',)
 
     def __unicode__(self):
         return self.name

@@ -150,7 +149,6 @@ class User(models.Model):
     class Meta:
         verbose_name = _('user')
         verbose_name_plural = _('users')
-        ordering = ('username',)
 
     def __unicode__(self):
         return self.username

@@ -1,8 +1,8 @@
-from django.contrib.auth.tests.basic import BASIC_TESTS
-from django.contrib.auth.tests.forms import FORM_TESTS, PasswordResetFormTestCase
+from django.contrib.auth.tests.basic import BASIC_TESTS, PasswordResetTest
+from django.contrib.auth.tests.forms import FORM_TESTS
 
 __test__ = {
     'BASIC_TESTS': BASIC_TESTS,
-    'PASSWORDRESET_TESTS': PasswordResetFormTestCase,
+    'PASSWORDRESET_TESTS': PasswordResetTest,
     'FORM_TESTS': FORM_TESTS,
 }

@@ -54,3 +54,24 @@ u'joe@somewhere.org'
 >>> u.password
 u'!'
 """
+
+from django.test import TestCase
+from django.core import mail
+
+class PasswordResetTest(TestCase):
+    fixtures = ['authtestdata.json']
+    urls = 'django.contrib.auth.urls'
+
+    def test_email_not_found(self):
+        "Error is raised if the provided email address isn't currently registered"
+        response = self.client.get('/password_reset/')
+        self.assertEquals(response.status_code, 200)
+        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
+        self.assertContains(response, "That e-mail address doesn't have an associated user account")
+        self.assertEquals(len(mail.outbox), 0)
+
+    def test_email_found(self):
+        "Email is sent if a valid email address is provided for password reset"
+        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
+        self.assertEquals(response.status_code, 302)
+        self.assertEquals(len(mail.outbox), 1)
@@ -1,33 +1,4 @@
-
-from django.core import mail
-from django.test import TestCase
-from django.contrib.auth.models import User
-from django.contrib.auth.forms import PasswordResetForm
-
-class PasswordResetFormTestCase(TestCase):
-    def testValidUser(self):
-        data = {
-            'email': 'nonexistent@example.com',
-        }
-        form = PasswordResetForm(data)
-        self.assertEqual(form.is_valid(), False)
-        self.assertEqual(form["email"].errors, [u"That e-mail address doesn't have an associated user account. Are you sure you've registered?"])
-
-    def testEmail(self):
-        # TODO: remove my email address from the test ;)
-        User.objects.create_user('atestuser', 'atestuser@example.com', 'test789')
-        data = {
-            'email': 'atestuser@example.com',
-        }
-        form = PasswordResetForm(data)
-        self.assertEqual(form.is_valid(), True)
-        # TODO: look at why using contrib.sites breaks other tests
-        form.save(domain_override="example.com")
-        self.assertEqual(len(mail.outbox), 1)
-        self.assertEqual(mail.outbox[0].subject, u'Password reset on example.com')
-        # TODO: test mail body. need to figure out a way to get the password in plain text
-        # self.assertEqual(mail.outbox[0].body, '')
 
 FORM_TESTS = """
 >>> from django.contrib.auth.models import User
 >>> from django.contrib.auth.forms import UserCreationForm, AuthenticationForm

django/contrib/auth/urls.py (new file, 13 lines)

@@ -0,0 +1,13 @@
+# These URLs are normally mapped to /admin/urls.py. This URLs file is
+# provided as a convenience to those who want to deploy these URLs elsewhere.
+# This file is also used to provide a reliable view deployment for test purposes.
+
+from django.conf.urls.defaults import *
+
+urlpatterns = patterns('',
+    ('^logout/$', 'django.contrib.auth.views.logout'),
+    ('^password_change/$', 'django.contrib.auth.views.password_change'),
+    ('^password_change/done/$', 'django.contrib.auth.views.password_change_done'),
+    ('^password_reset/$', 'django.contrib.auth.views.password_reset')
+)
+
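The new django/contrib/auth/urls.py can also be wired into a project URLconf by hand. A minimal sketch, assuming a hypothetical project; the 'accounts/' prefix is an invention for illustration, not part of this commit:

    from django.conf.urls.defaults import *

    urlpatterns = patterns('',
        (r'^accounts/', include('django.contrib.auth.urls')),
    )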
@@ -8,7 +8,7 @@ class FlatPage(models.Model):
     url = models.CharField(_('URL'), max_length=100, validator_list=[validators.isAlphaNumericURL], db_index=True,
         help_text=_("Example: '/about/contact/'. Make sure to have leading and trailing slashes."))
     title = models.CharField(_('title'), max_length=200)
-    content = models.TextField(_('content'))
+    content = models.TextField(_('content'), blank=True)
     enable_comments = models.BooleanField(_('enable comments'))
     template_name = models.CharField(_('template name'), max_length=70, blank=True,
         help_text=_("Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'."))

@@ -21,18 +21,14 @@ class TestForm(forms.Form):
 
 
 class PreviewTests(TestCase):
+    urls = 'django.contrib.formtools.test_urls'
 
     def setUp(self):
-        self._old_root_urlconf = settings.ROOT_URLCONF
-        settings.ROOT_URLCONF = 'django.contrib.formtools.test_urls'
         # Create a FormPreview instance to share between tests
         self.preview = preview.FormPreview(TestForm)
         input_template = '<input type="hidden" name="%s" value="%s" />'
         self.input = input_template % (self.preview.unused_name('stage'), "%d")
 
-    def tearDown(self):
-        settings.ROOT_URLCONF = self._old_root_urlconf
-
     def test_unused_name(self):
         """
         Verifies name mangling to get uniue field name.
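Both the formtools and auth test suites above now rely on the TestCase.urls hook instead of saving and restoring ROOT_URLCONF by hand. A minimal sketch of the pattern; the myapp.test_urls module is an assumed example:

    from django.test import TestCase

    class MyViewTests(TestCase):
        # The test framework swaps ROOT_URLCONF to this module for the duration
        # of each test and restores the original afterwards.
        urls = 'myapp.test_urls'

        def test_index(self):
            response = self.client.get('/')
            self.assertEqual(response.status_code, 200)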
@@ -21,10 +21,10 @@ class Command(LabelCommand):
         for f in fields:
             field_output = [qn(f.name), f.db_type()]
             field_output.append("%sNULL" % (not f.null and "NOT " or ""))
-            if f.unique:
-                field_output.append("UNIQUE")
             if f.primary_key:
                 field_output.append("PRIMARY KEY")
+            elif f.unique:
+                field_output.append("UNIQUE")
             if f.db_index:
                 unique = f.unique and "UNIQUE " or ""
                 index_output.append("CREATE %sINDEX %s_%s ON %s (%s);" % \

@@ -162,3 +162,9 @@ class Command(BaseCommand):
         else:
             if verbosity > 0:
                 print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)
+
+        # Close the DB connection. This is required as a workaround for an
+        # edge case in MySQL: if the same connection is used to
+        # create tables, load data, and query, the query can return
+        # incorrect results. See Django #7572, MySQL #37735.
+        connection.close()

@@ -268,11 +268,11 @@ def sql_model_create(model, style, known_models=set()):
         field_output = [style.SQL_FIELD(qn(f.column)),
             style.SQL_COLTYPE(col_type)]
         field_output.append(style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or '')))
-        if f.unique and (not f.primary_key or connection.features.allows_unique_and_pk):
-            field_output.append(style.SQL_KEYWORD('UNIQUE'))
         if f.primary_key:
             field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
-        if tablespace and connection.features.supports_tablespaces and (f.unique or f.primary_key) and connection.features.autoindexes_primary_keys:
+        elif f.unique:
+            field_output.append(style.SQL_KEYWORD('UNIQUE'))
+        if tablespace and connection.features.supports_tablespaces and f.unique:
             # We must specify the index tablespace inline, because we
             # won't be generating a CREATE INDEX statement for this field.
             field_output.append(connection.ops.tablespace_sql(tablespace, inline=True))

@@ -355,7 +355,7 @@ def many_to_many_sql_for_model(model, style):
     for f in opts.local_many_to_many:
         if not isinstance(f.rel, generic.GenericRel):
             tablespace = f.db_tablespace or opts.db_tablespace
-            if tablespace and connection.features.supports_tablespaces and connection.features.autoindexes_primary_keys:
+            if tablespace and connection.features.supports_tablespaces:
                 tablespace_sql = ' ' + connection.ops.tablespace_sql(tablespace, inline=True)
             else:
                 tablespace_sql = ''

@@ -460,15 +460,14 @@ def sql_indexes_for_model(model, style):
 
     qn = connection.ops.quote_name
     for f in model._meta.local_fields:
-        if f.db_index and not ((f.primary_key or f.unique) and connection.features.autoindexes_primary_keys):
-            unique = f.unique and 'UNIQUE ' or ''
+        if f.db_index and not f.unique:
            tablespace = f.db_tablespace or model._meta.db_tablespace
            if tablespace and connection.features.supports_tablespaces:
                tablespace_sql = ' ' + connection.ops.tablespace_sql(tablespace)
            else:
                tablespace_sql = ''
            output.append(
-                style.SQL_KEYWORD('CREATE %sINDEX' % unique) + ' ' + \
+                style.SQL_KEYWORD('CREATE INDEX') + ' ' + \
                style.SQL_TABLE(qn('%s_%s' % (model._meta.db_table, f.column))) + ' ' + \
                style.SQL_KEYWORD('ON') + ' ' + \
                style.SQL_TABLE(qn(model._meta.db_table)) + ' ' + \
@@ -551,6 +551,9 @@ class WSGIRequestHandler(BaseHTTPRequestHandler):
     def __init__(self, *args, **kwargs):
         from django.conf import settings
         self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
+        # We set self.path to avoid crashes in log_message() on unsupported
+        # requests (like "OPTIONS").
+        self.path = ''
         BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
 
     def get_environ(self):

@@ -40,6 +40,7 @@ Optional Fcgi settings: (setting=value)
   workdir=DIRECTORY    change to this directory when daemonizing.
   outlog=FILE          write stdout to this file.
   errlog=FILE          write stderr to this file.
+  umask=UMASK          umask to use when daemonizing (default 022).
 
 Examples:
   Run a "standard" fastcgi process on a file-descriptor

@@ -73,6 +74,7 @@ FASTCGI_OPTIONS = {
     'maxrequests': 0,
     'outlog': None,
     'errlog': None,
+    'umask': None,
 }
 
 def fastcgi_help(message=None):

@@ -159,6 +161,8 @@ def runfastcgi(argset=[], **kwargs):
         daemon_kwargs['out_log'] = options['outlog']
     if options['errlog']:
         daemon_kwargs['err_log'] = options['errlog']
+    if options['umask']:
+        daemon_kwargs['umask'] = int(options['umask'])
 
     if daemonize:
         from django.utils.daemonize import become_daemon
@@ -296,3 +296,8 @@ def reverse(viewname, urlconf=None, args=None, kwargs=None):
     kwargs = kwargs or {}
     return iri_to_uri(u'/' + get_resolver(urlconf).reverse(viewname, *args, **kwargs))
 
+def clear_url_caches():
+    global _resolver_cache
+    global _callable_cache
+    _resolver_cache.clear()
+    _callable_cache.clear()
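clear_url_caches() exists so that code which swaps URLconfs at runtime (test utilities, mostly) can drop the memoized resolvers. A rough usage sketch; the 'myproject.test_urls' module name is an assumption:

    from django.conf import settings
    from django.core import urlresolvers

    settings.ROOT_URLCONF = 'myproject.test_urls'
    urlresolvers.clear_url_caches()   # otherwise reverse() may keep resolving against the old URLconf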
@@ -41,8 +41,6 @@ class BaseDatabaseWrapper(local):
 
 class BaseDatabaseFeatures(object):
     allows_group_by_ordinal = True
-    allows_unique_and_pk = True
-    autoindexes_primary_keys = True
     inline_fk_references = True
     needs_datetime_string_cast = True
     supports_constraints = True

@@ -60,7 +60,6 @@ server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
 # TRADITIONAL will automatically cause most warnings to be treated as errors.
 
 class DatabaseFeatures(BaseDatabaseFeatures):
-    autoindexes_primary_keys = False
     inline_fk_references = False
     empty_fetchmany_value = ()
     update_can_self_select = False

@@ -136,7 +135,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
     features = DatabaseFeatures()
     ops = DatabaseOperations()
     operators = {
-        'exact': '= %s',
+        'exact': '= BINARY %s',
         'iexact': 'LIKE %s',
         'contains': 'LIKE BINARY %s',
         'icontains': 'LIKE %s',

@@ -64,7 +64,6 @@ class MysqlDebugWrapper:
         return getattr(self.cursor, attr)
 
 class DatabaseFeatures(BaseDatabaseFeatures):
-    autoindexes_primary_keys = False
     inline_fk_references = False
     empty_fetchmany_value = ()
     update_can_self_select = False

@@ -140,7 +139,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
     features = DatabaseFeatures()
     ops = DatabaseOperations()
     operators = {
-        'exact': '= %s',
+        'exact': '= BINARY %s',
         'iexact': 'LIKE %s',
         'contains': 'LIKE BINARY %s',
         'icontains': 'LIKE %s',

@@ -24,7 +24,6 @@ IntegrityError = Database.IntegrityError
 
 class DatabaseFeatures(BaseDatabaseFeatures):
     allows_group_by_ordinal = False
-    allows_unique_and_pk = False    # Suppress UNIQUE/PK for Oracle (ORA-02259)
     empty_fetchmany_value = ()
     needs_datetime_string_cast = False
     supports_tablespaces = True

@@ -23,7 +23,7 @@ DATA_TYPES = {
     'ImageField': 'NVARCHAR2(%(max_length)s)',
     'IntegerField': 'NUMBER(11)',
     'IPAddressField': 'VARCHAR2(15)',
-    'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(column)s IS NULL))',
+    'NullBooleanField': 'NUMBER(1) CHECK ((%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL))',
     'OneToOneField': 'NUMBER(11)',
     'PhoneNumberField': 'VARCHAR2(20)',
     'PositiveIntegerField': 'NUMBER(11) CHECK (%(qn_column)s >= 0)',

@@ -97,7 +97,7 @@ class DatabaseOperations(BaseDatabaseOperations):
         # Use `coalesce` to set the sequence for each model to the max pk value if there are records,
         # or 1 if there are none. Set the `is_called` property (the third argument to `setval`) to true
         # if there are records (as the max pk value is already in use), otherwise set it to false.
-        for f in model._meta.fields:
+        for f in model._meta.local_fields:
             if isinstance(f, models.AutoField):
                 output.append("%s setval('%s', coalesce(max(%s), 1), max(%s) %s null) %s %s;" % \
                     (style.SQL_KEYWORD('SELECT'),
@@ -50,7 +50,15 @@ class ModelBase(type):
             meta = attr_meta
         base_meta = getattr(new_class, '_meta', None)
 
-        new_class.add_to_class('_meta', Options(meta))
+        if getattr(meta, 'app_label', None) is None:
+            # Figure out the app_label by looking one level up.
+            # For 'django.contrib.sites.models', this would be 'sites'.
+            model_module = sys.modules[new_class.__module__]
+            kwargs = {"app_label": model_module.__name__.split('.')[-2]}
+        else:
+            kwargs = {}
+
+        new_class.add_to_class('_meta', Options(meta, **kwargs))
         if not abstract:
             new_class.add_to_class('DoesNotExist',
                     subclass_exception('DoesNotExist', ObjectDoesNotExist, module))
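With app_label now resolved while Options is being built (rather than patched onto _meta afterwards), a model can still declare where it belongs explicitly. A schematic example; the model and app names are invented and it assumes a configured Django settings module:

    from django.db import models

    class LogEntry(models.Model):
        message = models.TextField()

        class Meta:
            app_label = 'audit'   # picked up immediately when _meta is constructed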
@@ -71,11 +79,6 @@ class ModelBase(type):
         if new_class._default_manager.model._meta.abstract:
             old_default_mgr = new_class._default_manager
             new_class._default_manager = None
-        if getattr(new_class._meta, 'app_label', None) is None:
-            # Figure out the app_label by looking one level up.
-            # For 'django.contrib.sites.models', this would be 'sites'.
-            model_module = sys.modules[new_class.__module__]
-            new_class._meta.app_label = model_module.__name__.split('.')[-2]
 
         # Bail out early if we have already created this class.
         m = get_model(new_class._meta.app_label, name, False)

@@ -389,6 +392,21 @@ class Model(object):
             for sub_obj in getattr(self, rel_opts_name).all():
                 sub_obj._collect_sub_objects(seen_objs, self.__class__, related.field.null)
 
+        # Handle any ancestors (for the model-inheritance case). We do this by
+        # traversing to the most remote parent classes -- those with no parents
+        # themselves -- and then adding those instances to the collection. That
+        # will include all the child instances down to "self".
+        parent_stack = self._meta.parents.values()
+        while parent_stack:
+            link = parent_stack.pop()
+            parent_obj = getattr(self, link.name)
+            if parent_obj._meta.parents:
+                parent_stack.extend(parent_obj._meta.parents.values())
+                continue
+            # At this point, parent_obj is base class (no ancestor models). So
+            # delete it and all its descendents.
+            parent_obj._collect_sub_objects(seen_objs)
+
     def delete(self):
         assert self._get_pk_val() is not None, "%s object can't be deleted because its %s attribute is set to None." % (self._meta.object_name, self._meta.pk.attname)
 

@@ -436,7 +454,7 @@ class Model(object):
 
     def _get_FIELD_filename(self, field):
         if getattr(self, field.attname): # value is not blank
-            return os.path.join(settings.MEDIA_ROOT, getattr(self, field.attname))
+            return os.path.normpath(os.path.join(settings.MEDIA_ROOT, getattr(self, field.attname)))
         return ''
 
     def _get_FIELD_url(self, field):
@@ -85,7 +85,7 @@ class Field(object):
         self.name = name
         self.verbose_name = verbose_name
         self.primary_key = primary_key
-        self.max_length, self.unique = max_length, unique
+        self.max_length, self._unique = max_length, unique
         self.blank, self.null = blank, null
         # Oracle treats the empty string ('') as null, so coerce the null
         # option whenever '' is a possible value.

@@ -160,6 +160,10 @@ class Field(object):
         except KeyError:
             return None
 
+    def unique(self):
+        return self._unique or self.primary_key
+    unique = property(unique)
+
     def validate_full(self, field_data, all_data):
         """
         Returns a list of errors for this field. This is the main interface,
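A quick sketch of what the new read-only property changes in practice; it assumes a configured Django settings module, since Field.__init__ reads a couple of settings:

    from django.db import models

    pk_field = models.IntegerField(primary_key=True)
    plain_field = models.IntegerField()

    pk_field.unique      # True: primary keys now report themselves as unique
    plain_field.unique   # False, as before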
@@ -676,7 +680,7 @@ class DecimalField(Field):
                 _("This value must be a decimal number."))
 
     def _format(self, value):
-        if isinstance(value, basestring):
+        if isinstance(value, basestring) or value is None:
             return value
         else:
             return self.format_number(value)

@@ -697,8 +701,7 @@ class DecimalField(Field):
         return u"%.*f" % (self.decimal_places, value)
 
     def get_db_prep_save(self, value):
-        if value is not None:
-            value = self._format(value)
+        value = self._format(value)
         return super(DecimalField, self).get_db_prep_save(value)
 
     def get_db_prep_lookup(self, lookup_type, value):
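The net effect of the two DecimalField changes is that None (like strings) passes straight through formatting instead of reaching format_number(). A small sketch, again assuming a configured settings module:

    from django.db import models

    f = models.DecimalField(max_digits=5, decimal_places=2)
    f.get_db_prep_save(None)      # None is handed to the backend unchanged
    f.get_db_prep_save('12.34')   # strings are passed through by _format()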
@@ -1151,12 +1154,3 @@ class XMLField(TextField):
     def get_manipulator_field_objs(self):
         return [curry(oldforms.XMLLargeTextField, schema_path=self.schema_path)]
 
-class OrderingField(IntegerField):
-    empty_strings_allowed=False
-    def __init__(self, with_respect_to, **kwargs):
-        self.wrt = with_respect_to
-        kwargs['null'] = True
-        IntegerField.__init__(self, **kwargs )
-
-    def get_manipulator_fields(self, opts, manipulator, change, name_prefix='', rel=False, follow=True):
-        return [oldforms.HiddenField(name_prefix + self.name)]

@@ -322,7 +322,9 @@ class ForeignRelatedObjectsDescriptor(object):
         clear.alters_data = True
 
         manager = RelatedManager()
-        manager.core_filters = {'%s__pk' % rel_field.name: getattr(instance, rel_field.rel.get_related_field().attname)}
+        attname = rel_field.rel.get_related_field().name
+        manager.core_filters = {'%s__%s' % (rel_field.name, attname):
+                getattr(instance, attname)}
         manager.model = self.related.model
 
         return manager

@@ -670,6 +672,11 @@ class ForeignKey(RelatedField, Field):
     def contribute_to_class(self, cls, name):
         super(ForeignKey, self).contribute_to_class(cls, name)
         setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
+        if isinstance(self.rel.to, basestring):
+            target = self.rel.to
+        else:
+            target = self.rel.to._meta.db_table
+        cls._meta.duplicate_targets[self.column] = (target, "o2m")
 
     def contribute_to_related_class(self, cls, related):
         setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))

@@ -791,6 +798,12 @@ class ManyToManyField(RelatedField, Field):
         # Set up the accessor for the m2m table name for the relation
         self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
 
+        if isinstance(self.rel.to, basestring):
+            target = self.rel.to
+        else:
+            target = self.rel.to._meta.db_table
+        cls._meta.duplicate_targets[self.column] = (target, "m2m")
+
     def contribute_to_related_class(self, cls, related):
         # m2m relations to self do not have a ManyRelatedObjectsDescriptor,
         # as it would be redundant - unless the field is non-symmetrical.
@@ -24,7 +24,7 @@ DEFAULT_NAMES = ('verbose_name', 'db_table', 'ordering',
                  'abstract')
 
 class Options(object):
-    def __init__(self, meta):
+    def __init__(self, meta, app_label=None):
         self.local_fields, self.local_many_to_many = [], []
         self.module_name, self.verbose_name = None, None
         self.verbose_name_plural = None

@@ -32,7 +32,7 @@ class Options(object):
         self.ordering = []
         self.unique_together = []
         self.permissions = []
-        self.object_name, self.app_label = None, None
+        self.object_name, self.app_label = None, app_label
         self.get_latest_by = None
         self.order_with_respect_to = None
         self.db_tablespace = settings.DEFAULT_TABLESPACE

@@ -43,8 +43,12 @@ class Options(object):
         self.one_to_one_field = None
         self.abstract = False
         self.parents = SortedDict()
+        self.duplicate_targets = {}
 
     def contribute_to_class(self, cls, name):
+        from django.db import connection
+        from django.db.backends.util import truncate_name
+
         cls._meta = self
         self.installed = re.sub('\.models$', '', cls.__module__) in settings.INSTALLED_APPS
         # First, construct the default values for these options.

@@ -86,9 +90,13 @@ class Options(object):
             self.verbose_name_plural = string_concat(self.verbose_name, 's')
         del self.meta
 
+        # If the db_table wasn't provided, use the app_label + module_name.
+        if not self.db_table:
+            self.db_table = "%s_%s" % (self.app_label, self.module_name)
+            self.db_table = truncate_name(self.db_table, connection.ops.max_name_length())
+
     def _prepare(self, model):
-        from django.db import connection
-        from django.db.backends.util import truncate_name
         if self.order_with_respect_to:
             self.order_with_respect_to = self.get_field(self.order_with_respect_to)
             self.ordering = ('_order',)

@@ -107,10 +115,23 @@ class Options(object):
                 auto_created=True)
             model.add_to_class('id', auto)
 
-        # If the db_table wasn't provided, use the app_label + module_name.
-        if not self.db_table:
-            self.db_table = "%s_%s" % (self.app_label, self.module_name)
-            self.db_table = truncate_name(self.db_table, connection.ops.max_name_length())
+        # Determine any sets of fields that are pointing to the same targets
+        # (e.g. two ForeignKeys to the same remote model). The query
+        # construction code needs to know this. At the end of this,
+        # self.duplicate_targets will map each duplicate field column to the
+        # columns it duplicates.
+        collections = {}
+        for column, target in self.duplicate_targets.iteritems():
+            try:
+                collections[target].add(column)
+            except KeyError:
+                collections[target] = set([column])
+        self.duplicate_targets = {}
+        for elt in collections.itervalues():
+            if len(elt) == 1:
+                continue
+            for column in elt:
+                self.duplicate_targets[column] = elt.difference(set([column]))
 
     def add_field(self, field):
         # Insert the given field in the order in which it was created, using
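The bookkeeping added above targets the classic case of two foreign keys on one model pointing at the same table. A schematic illustration; the models are invented for this sketch and it assumes a configured settings module:

    from django.db import models

    class Person(models.Model):
        name = models.CharField(max_length=50)

    class Match(models.Model):
        # Both columns point at Person's table, so Options._prepare() records
        # each column in duplicate_targets; the SQL construction code then
        # avoids collapsing their joins onto a single alias.
        winner = models.ForeignKey(Person, related_name='wins')
        loser = models.ForeignKey(Person, related_name='losses')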
@@ -3,7 +3,7 @@ import warnings
 from django.conf import settings
 from django.db import connection, transaction, IntegrityError
 from django.db.models.fields import DateField, FieldDoesNotExist
-from django.db.models.query_utils import Q
+from django.db.models.query_utils import Q, select_related_descend
 from django.db.models import signals, sql
 from django.dispatch import dispatcher
 from django.utils.datastructures import SortedDict
|
|||||||
index_end = index_start + len(klass._meta.fields)
|
index_end = index_start + len(klass._meta.fields)
|
||||||
obj = klass(*row[index_start:index_end])
|
obj = klass(*row[index_start:index_end])
|
||||||
for f in klass._meta.fields:
|
for f in klass._meta.fields:
|
||||||
if (not f.rel or (not restricted and f.null) or
|
if not select_related_descend(f, restricted, requested):
|
||||||
(restricted and f.name not in requested) or f.rel.parent_link):
|
|
||||||
continue
|
continue
|
||||||
if restricted:
|
if restricted:
|
||||||
next = requested[f.name]
|
next = requested[f.name]
|
||||||
|
@@ -48,3 +48,20 @@
         obj.negate()
         return obj
 
+def select_related_descend(field, restricted, requested):
+    """
+    Returns True if this field should be used to descend deeper for
+    select_related() purposes. Used by both the query construction code
+    (sql.query.fill_related_selections()) and the model instance creation code
+    (query.get_cached_row()).
+    """
+    if not field.rel:
+        return False
+    if field.rel.parent_link:
+        return False
+    if restricted and field.name not in requested:
+        return False
+    if not restricted and field.null:
+        return False
+    return True
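A minimal sketch of how the new helper gets consulted while walking a model's fields; the related_fields() wrapper is invented for illustration and is not part of this commit:

    from django.db.models.query_utils import select_related_descend

    def related_fields(model, restricted=False, requested=None):
        # Yield only the relations that select_related() would follow.
        for field in model._meta.fields:
            if select_related_descend(field, restricted, requested or {}):
                yield field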
@@ -7,6 +7,7 @@ databases). The abstraction barrier only works one way: this module has to know
 all about the internals of models in order to get the information it needs.
 """
 
+import datetime
 from copy import deepcopy
 
 from django.utils.tree import Node

@@ -14,9 +15,10 @@ from django.utils.datastructures import SortedDict
 from django.dispatch import dispatcher
 from django.db import connection
 from django.db.models import signals
+from django.db.models.fields import FieldDoesNotExist
+from django.db.models.query_utils import select_related_descend
 from django.db.models.sql.where import WhereNode, EverythingNode, AND, OR
 from django.db.models.sql.datastructures import Count
-from django.db.models.fields import FieldDoesNotExist
 from django.core.exceptions import FieldError
 from datastructures import EmptyResultSet, Empty, MultiJoin
 from constants import *
@ -56,6 +58,7 @@ class Query(object):
|
|||||||
self.start_meta = None
|
self.start_meta = None
|
||||||
self.select_fields = []
|
self.select_fields = []
|
||||||
self.related_select_fields = []
|
self.related_select_fields = []
|
||||||
|
self.dupe_avoidance = {}
|
||||||
|
|
||||||
# SQL-related attributes
|
# SQL-related attributes
|
||||||
self.select = []
|
self.select = []
|
||||||
@ -164,6 +167,7 @@ class Query(object):
|
|||||||
obj.start_meta = self.start_meta
|
obj.start_meta = self.start_meta
|
||||||
obj.select_fields = self.select_fields[:]
|
obj.select_fields = self.select_fields[:]
|
||||||
obj.related_select_fields = self.related_select_fields[:]
|
obj.related_select_fields = self.related_select_fields[:]
|
||||||
|
obj.dupe_avoidance = self.dupe_avoidance.copy()
|
||||||
obj.select = self.select[:]
|
obj.select = self.select[:]
|
||||||
obj.tables = self.tables[:]
|
obj.tables = self.tables[:]
|
||||||
obj.where = deepcopy(self.where)
|
obj.where = deepcopy(self.where)
|
||||||
@ -214,7 +218,7 @@ class Query(object):
|
|||||||
obj.select_related = False
|
obj.select_related = False
|
||||||
obj.related_select_cols = []
|
obj.related_select_cols = []
|
||||||
obj.related_select_fields = []
|
obj.related_select_fields = []
|
||||||
if obj.distinct and len(obj.select) > 1:
|
if len(obj.select) > 1:
|
||||||
obj = self.clone(CountQuery, _query=obj, where=self.where_class(),
|
obj = self.clone(CountQuery, _query=obj, where=self.where_class(),
|
||||||
distinct=False)
|
distinct=False)
|
||||||
obj.select = []
|
obj.select = []
|
||||||
@ -362,10 +366,21 @@ class Query(object):
|
|||||||
item.relabel_aliases(change_map)
|
item.relabel_aliases(change_map)
|
||||||
self.select.append(item)
|
self.select.append(item)
|
||||||
self.select_fields = rhs.select_fields[:]
|
self.select_fields = rhs.select_fields[:]
|
||||||
self.extra_select = rhs.extra_select.copy()
|
|
||||||
self.extra_tables = rhs.extra_tables
|
if connector == OR:
|
||||||
self.extra_where = rhs.extra_where
|
# It would be nice to be able to handle this, but the queries don't
|
||||||
self.extra_params = rhs.extra_params
|
# really make sense (or return consistent value sets). Not worth
|
||||||
|
# the extra complexity when you can write a real query instead.
|
||||||
|
if self.extra_select and rhs.extra_select:
|
||||||
|
raise ValueError("When merging querysets using 'or', you "
|
||||||
|
"cannot have extra(select=...) on both sides.")
|
||||||
|
if self.extra_where and rhs.extra_where:
|
||||||
|
raise ValueError("When merging querysets using 'or', you "
|
||||||
|
"cannot have extra(where=...) on both sides.")
|
||||||
|
self.extra_select.update(rhs.extra_select)
|
||||||
|
self.extra_tables += rhs.extra_tables
|
||||||
|
self.extra_where += rhs.extra_where
|
||||||
|
self.extra_params += rhs.extra_params
|
||||||
|
|
||||||
# Ordering uses the 'rhs' ordering, unless it has none, in which case
|
# Ordering uses the 'rhs' ordering, unless it has none, in which case
|
||||||
# the current ordering is used.
|
# the current ordering is used.
|
||||||
@ -439,28 +454,39 @@ class Query(object):
|
|||||||
self._select_aliases = aliases
|
self._select_aliases = aliases
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def get_default_columns(self, with_aliases=False, col_aliases=None):
|
def get_default_columns(self, with_aliases=False, col_aliases=None,
|
||||||
|
start_alias=None, opts=None, as_pairs=False):
|
||||||
"""
|
"""
|
||||||
Computes the default columns for selecting every field in the base
|
Computes the default columns for selecting every field in the base
|
||||||
model.
|
model.
|
||||||
|
|
||||||
Returns a list of strings, quoted appropriately for use in SQL
|
Returns a list of strings, quoted appropriately for use in SQL
|
||||||
directly, as well as a set of aliases used in the select statement.
|
directly, as well as a set of aliases used in the select statement (if
|
||||||
|
'as_pairs' is True, returns a list of (alias, col_name) pairs instead
|
||||||
|
of strings as the first component and None as the second component).
|
||||||
"""
|
"""
|
||||||
result = []
|
result = []
|
||||||
table_alias = self.tables[0]
|
if opts is None:
|
||||||
root_pk = self.model._meta.pk.column
|
opts = self.model._meta
|
||||||
|
if start_alias:
|
||||||
|
table_alias = start_alias
|
||||||
|
else:
|
||||||
|
table_alias = self.tables[0]
|
||||||
|
root_pk = opts.pk.column
|
||||||
seen = {None: table_alias}
|
seen = {None: table_alias}
|
||||||
qn = self.quote_name_unless_alias
|
qn = self.quote_name_unless_alias
|
||||||
qn2 = self.connection.ops.quote_name
|
qn2 = self.connection.ops.quote_name
|
||||||
aliases = set()
|
aliases = set()
|
||||||
for field, model in self.model._meta.get_fields_with_model():
|
for field, model in opts.get_fields_with_model():
|
||||||
try:
|
try:
|
||||||
alias = seen[model]
|
alias = seen[model]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
alias = self.join((table_alias, model._meta.db_table,
|
alias = self.join((table_alias, model._meta.db_table,
|
||||||
root_pk, model._meta.pk.column))
|
root_pk, model._meta.pk.column))
|
||||||
seen[model] = alias
|
seen[model] = alias
|
||||||
|
if as_pairs:
|
||||||
|
result.append((alias, field.column))
|
||||||
|
continue
|
||||||
if with_aliases and field.column in col_aliases:
|
if with_aliases and field.column in col_aliases:
|
||||||
c_alias = 'Col%d' % len(col_aliases)
|
c_alias = 'Col%d' % len(col_aliases)
|
||||||
result.append('%s.%s AS %s' % (qn(alias),
|
result.append('%s.%s AS %s' % (qn(alias),
|
||||||
@ -473,6 +499,8 @@ class Query(object):
|
|||||||
aliases.add(r)
|
aliases.add(r)
|
||||||
if with_aliases:
|
if with_aliases:
|
||||||
col_aliases.add(field.column)
|
col_aliases.add(field.column)
|
||||||
|
if as_pairs:
|
||||||
|
return result, None
|
||||||
return result, aliases
|
return result, aliases
|
||||||
|
|
||||||
def get_from_clause(self):
|
def get_from_clause(self):
|
||||||
@ -609,6 +637,11 @@ class Query(object):
|
|||||||
alias, False)
|
alias, False)
|
||||||
alias = joins[-1]
|
alias = joins[-1]
|
||||||
col = target.column
|
col = target.column
|
||||||
|
if not field.rel:
|
||||||
|
# To avoid inadvertent trimming of a necessary alias, use the
|
||||||
|
# refcount to show that we are referencing a non-relation field on
|
||||||
|
# the model.
|
||||||
|
self.ref_alias(alias)
|
||||||
|
|
||||||
# Must use left outer joins for nullable fields.
|
# Must use left outer joins for nullable fields.
|
||||||
for join in joins:
|
for join in joins:
|
||||||
@ -829,8 +862,8 @@ class Query(object):
|
|||||||
|
|
||||||
if reuse and always_create and table in self.table_map:
|
if reuse and always_create and table in self.table_map:
|
||||||
# Convert the 'reuse' to case to be "exclude everything but the
|
# Convert the 'reuse' to case to be "exclude everything but the
|
||||||
# reusable set for this table".
|
# reusable set, minus exclusions, for this table".
|
||||||
exclusions = set(self.table_map[table]).difference(reuse)
|
exclusions = set(self.table_map[table]).difference(reuse).union(set(exclusions))
|
||||||
always_create = False
|
always_create = False
|
||||||
t_ident = (lhs_table, table, lhs_col, col)
|
t_ident = (lhs_table, table, lhs_col, col)
|
||||||
if not always_create:
|
if not always_create:
|
||||||
@ -865,7 +898,8 @@ class Query(object):
|
|||||||
return alias
|
return alias
|
||||||
|
|
||||||
def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
|
def fill_related_selections(self, opts=None, root_alias=None, cur_depth=1,
|
||||||
used=None, requested=None, restricted=None, nullable=None):
|
used=None, requested=None, restricted=None, nullable=None,
|
||||||
|
dupe_set=None):
|
||||||
"""
|
"""
|
||||||
Fill in the information needed for a select_related query. The current
|
Fill in the information needed for a select_related query. The current
|
||||||
depth is measured as the number of connections away from the root model
|
depth is measured as the number of connections away from the root model
|
||||||
@ -875,6 +909,7 @@ class Query(object):
|
|||||||
if not restricted and self.max_depth and cur_depth > self.max_depth:
|
if not restricted and self.max_depth and cur_depth > self.max_depth:
|
||||||
# We've recursed far enough; bail out.
|
# We've recursed far enough; bail out.
|
||||||
return
|
return
|
||||||
|
|
||||||
if not opts:
|
if not opts:
|
||||||
opts = self.get_meta()
|
opts = self.get_meta()
|
||||||
root_alias = self.get_initial_alias()
|
root_alias = self.get_initial_alias()
|
||||||
@ -882,6 +917,10 @@ class Query(object):
|
|||||||
self.related_select_fields = []
|
self.related_select_fields = []
|
||||||
if not used:
|
if not used:
|
||||||
used = set()
|
used = set()
|
||||||
|
if dupe_set is None:
|
||||||
|
dupe_set = set()
|
||||||
|
orig_dupe_set = dupe_set
|
||||||
|
orig_used = used
|
||||||
|
|
||||||
# Setup for the case when only particular related fields should be
|
# Setup for the case when only particular related fields should be
|
||||||
# included in the related selection.
|
# included in the related selection.
|
||||||
@ -893,9 +932,10 @@ class Query(object):
|
|||||||
restricted = False
|
restricted = False
|
||||||
|
|
||||||
for f, model in opts.get_fields_with_model():
|
for f, model in opts.get_fields_with_model():
|
||||||
if (not f.rel or (restricted and f.name not in requested) or
|
if not select_related_descend(f, restricted, requested):
|
||||||
(not restricted and f.null) or f.rel.parent_link):
|
|
||||||
continue
|
continue
|
||||||
|
dupe_set = orig_dupe_set.copy()
|
||||||
|
used = orig_used.copy()
|
||||||
table = f.rel.to._meta.db_table
|
table = f.rel.to._meta.db_table
|
||||||
if nullable or f.null:
|
if nullable or f.null:
|
||||||
promote = True
|
promote = True
|
||||||
@ -906,18 +946,32 @@ class Query(object):
|
|||||||
alias = root_alias
|
alias = root_alias
|
||||||
for int_model in opts.get_base_chain(model):
|
for int_model in opts.get_base_chain(model):
|
||||||
lhs_col = int_opts.parents[int_model].column
|
lhs_col = int_opts.parents[int_model].column
|
||||||
|
dedupe = lhs_col in opts.duplicate_targets
|
||||||
|
if dedupe:
|
||||||
|
used.update(self.dupe_avoidance.get(id(opts), lhs_col),
|
||||||
|
())
|
||||||
|
dupe_set.add((opts, lhs_col))
|
||||||
int_opts = int_model._meta
|
int_opts = int_model._meta
|
||||||
alias = self.join((alias, int_opts.db_table, lhs_col,
|
alias = self.join((alias, int_opts.db_table, lhs_col,
|
||||||
int_opts.pk.column), exclusions=used,
|
int_opts.pk.column), exclusions=used,
|
||||||
promote=promote)
|
promote=promote)
|
||||||
|
for (dupe_opts, dupe_col) in dupe_set:
|
||||||
|
self.update_dupe_avoidance(dupe_opts, dupe_col, alias)
|
||||||
else:
|
else:
|
||||||
alias = root_alias
|
alias = root_alias
|
||||||
|
|
||||||
|
dedupe = f.column in opts.duplicate_targets
|
||||||
|
if dupe_set or dedupe:
|
||||||
|
used.update(self.dupe_avoidance.get((id(opts), f.column), ()))
|
||||||
|
if dedupe:
|
||||||
|
dupe_set.add((opts, f.column))
|
||||||
|
|
||||||
alias = self.join((alias, table, f.column,
|
alias = self.join((alias, table, f.column,
|
||||||
f.rel.get_related_field().column), exclusions=used,
|
f.rel.get_related_field().column), exclusions=used,
|
||||||
promote=promote)
|
promote=promote)
|
||||||
used.add(alias)
|
used.add(alias)
|
||||||
self.related_select_cols.extend([(alias, f2.column)
|
self.related_select_cols.extend(self.get_default_columns(
|
||||||
for f2 in f.rel.to._meta.fields])
|
start_alias=alias, opts=f.rel.to._meta, as_pairs=True)[0])
|
||||||
self.related_select_fields.extend(f.rel.to._meta.fields)
|
self.related_select_fields.extend(f.rel.to._meta.fields)
|
||||||
if restricted:
|
if restricted:
|
||||||
next = requested.get(f.name, {})
|
next = requested.get(f.name, {})
|
||||||
@ -927,8 +981,10 @@ class Query(object):
|
|||||||
new_nullable = f.null
|
new_nullable = f.null
|
||||||
else:
|
else:
|
||||||
new_nullable = None
|
new_nullable = None
|
||||||
|
for dupe_opts, dupe_col in dupe_set:
|
||||||
|
self.update_dupe_avoidance(dupe_opts, dupe_col, alias)
|
||||||
self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1,
|
self.fill_related_selections(f.rel.to._meta, alias, cur_depth + 1,
|
||||||
used, next, restricted, new_nullable)
|
used, next, restricted, new_nullable, dupe_set)
|
||||||
|
|
||||||
def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
|
def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
|
||||||
can_reuse=None):
|
can_reuse=None):
|
||||||
@ -1048,7 +1104,19 @@ class Query(object):
|
|||||||
# that's harmless.
|
# that's harmless.
|
||||||
self.promote_alias(table)
|
self.promote_alias(table)
|
||||||
|
|
||||||
self.where.add((alias, col, field, lookup_type, value), connector)
|
# To save memory and copying time, convert the value from the Python
|
||||||
|
# object to the actual value used in the SQL query.
|
||||||
|
if field:
|
||||||
|
params = field.get_db_prep_lookup(lookup_type, value)
|
||||||
|
else:
|
||||||
|
params = Field().get_db_prep_lookup(lookup_type, value)
|
||||||
|
if isinstance(value, datetime.datetime):
|
||||||
|
annotation = datetime.datetime
|
||||||
|
else:
|
||||||
|
annotation = bool(value)
|
||||||
|
|
||||||
|
self.where.add((alias, col, field.db_type(), lookup_type, annotation,
|
||||||
|
params), connector)
|
||||||
|
|
||||||
if negate:
|
if negate:
|
||||||
for alias in join_list:
|
for alias in join_list:
|
||||||
@@ -1058,7 +1126,8 @@ class Query(object):
 for alias in join_list:
 if self.alias_map[alias][JOIN_TYPE] == self.LOUTER:
 j_col = self.alias_map[alias][RHS_JOIN_COL]
-entry = Node([(alias, j_col, None, 'isnull', True)])
+entry = Node([(alias, j_col, None, 'isnull', True,
+[True])])
 entry.negate()
 self.where.add(entry, AND)
 break
@@ -1066,7 +1135,7 @@ class Query(object):
 # Leaky abstraction artifact: We have to specifically
 # exclude the "foo__in=[]" case from this handling, because
 # it's short-circuited in the Where class.
-entry = Node([(alias, col, field, 'isnull', True)])
+entry = Node([(alias, col, None, 'isnull', True, [True])])
 entry.negate()
 self.where.add(entry, AND)

@@ -1114,7 +1183,9 @@ class Query(object):
 (which gives the table we are joining to), 'alias' is the alias for the
 table we are joining to. If dupe_multis is True, any many-to-many or
 many-to-one joins will always create a new alias (necessary for
-disjunctive filters).
+disjunctive filters). If can_reuse is not None, it's a list of aliases
+that can be reused in these joins (nothing else can be reused in this
+case).

 Returns the final field involved in the join, the target database
 column (used for any 'where' constraint), the final 'opts' value and the
@@ -1122,7 +1193,14 @@ class Query(object):
 """
 joins = [alias]
 last = [0]
+dupe_set = set()
+exclusions = set()
 for pos, name in enumerate(names):
+try:
+exclusions.add(int_alias)
+except NameError:
+pass
+exclusions.add(alias)
 last.append(len(joins))
 if name == 'pk':
 name = opts.pk.name
@@ -1141,6 +1219,7 @@ class Query(object):
 names = opts.get_all_field_names()
 raise FieldError("Cannot resolve keyword %r into field. "
 "Choices are: %s" % (name, ", ".join(names)))

 if not allow_many and (m2m or not direct):
 for alias in joins:
 self.unref_alias(alias)
@@ -1150,12 +1229,27 @@ class Query(object):
 alias_list = []
 for int_model in opts.get_base_chain(model):
 lhs_col = opts.parents[int_model].column
+dedupe = lhs_col in opts.duplicate_targets
+if dedupe:
+exclusions.update(self.dupe_avoidance.get(
+(id(opts), lhs_col), ()))
+dupe_set.add((opts, lhs_col))
 opts = int_model._meta
 alias = self.join((alias, opts.db_table, lhs_col,
-opts.pk.column), exclusions=joins)
+opts.pk.column), exclusions=exclusions)
 joins.append(alias)
+exclusions.add(alias)
+for (dupe_opts, dupe_col) in dupe_set:
+self.update_dupe_avoidance(dupe_opts, dupe_col, alias)
 cached_data = opts._join_cache.get(name)
 orig_opts = opts
+dupe_col = direct and field.column or field.field.column
+dedupe = dupe_col in opts.duplicate_targets
+if dupe_set or dedupe:
+if dedupe:
+dupe_set.add((opts, dupe_col))
+exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col),
+()))

 if direct:
 if m2m:
@@ -1177,9 +1271,11 @@ class Query(object):
 target)

 int_alias = self.join((alias, table1, from_col1, to_col1),
-dupe_multis, joins, nullable=True, reuse=can_reuse)
+dupe_multis, exclusions, nullable=True,
+reuse=can_reuse)
 alias = self.join((int_alias, table2, from_col2, to_col2),
-dupe_multis, joins, nullable=True, reuse=can_reuse)
+dupe_multis, exclusions, nullable=True,
+reuse=can_reuse)
 joins.extend([int_alias, alias])
 elif field.rel:
 # One-to-one or many-to-one field
@@ -1195,7 +1291,7 @@ class Query(object):
 opts, target)

 alias = self.join((alias, table, from_col, to_col),
-exclusions=joins, nullable=field.null)
+exclusions=exclusions, nullable=field.null)
 joins.append(alias)
 else:
 # Non-relation fields.
@@ -1223,9 +1319,11 @@ class Query(object):
 target)

 int_alias = self.join((alias, table1, from_col1, to_col1),
-dupe_multis, joins, nullable=True, reuse=can_reuse)
+dupe_multis, exclusions, nullable=True,
+reuse=can_reuse)
 alias = self.join((int_alias, table2, from_col2, to_col2),
-dupe_multis, joins, nullable=True, reuse=can_reuse)
+dupe_multis, exclusions, nullable=True,
+reuse=can_reuse)
 joins.extend([int_alias, alias])
 else:
 # One-to-many field (ForeignKey defined on the target model)
@@ -1243,14 +1341,34 @@ class Query(object):
 opts, target)

 alias = self.join((alias, table, from_col, to_col),
-dupe_multis, joins, nullable=True, reuse=can_reuse)
+dupe_multis, exclusions, nullable=True,
+reuse=can_reuse)
 joins.append(alias)

+for (dupe_opts, dupe_col) in dupe_set:
+try:
+self.update_dupe_avoidance(dupe_opts, dupe_col, int_alias)
+except NameError:
+self.update_dupe_avoidance(dupe_opts, dupe_col, alias)

 if pos != len(names) - 1:
 raise FieldError("Join on field %r not permitted." % name)

 return field, target, opts, joins, last

+def update_dupe_avoidance(self, opts, col, alias):
+"""
+For a column that is one of multiple pointing to the same table, update
+the internal data structures to note that this alias shouldn't be used
+for those other columns.
+"""
+ident = id(opts)
+for name in opts.duplicate_targets[col]:
+try:
+self.dupe_avoidance[ident, name].add(alias)
+except KeyError:
+self.dupe_avoidance[ident, name] = set([alias])

 def split_exclude(self, filter_expr, prefix):
 """
 When doing an exclude against any kind of N-to-many relation, we need
@@ -49,7 +49,7 @@ class DeleteQuery(Query):
 for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
 where = self.where_class()
 where.add((None, related.field.m2m_reverse_name(),
-related.field, 'in',
+related.field.db_type(), 'in', True,
 pk_list[offset : offset+GET_ITERATOR_CHUNK_SIZE]),
 AND)
 self.do_query(related.field.m2m_db_table(), where)
@@ -59,11 +59,11 @@ class DeleteQuery(Query):
 if isinstance(f, generic.GenericRelation):
 from django.contrib.contenttypes.models import ContentType
 field = f.rel.to._meta.get_field(f.content_type_field_name)
-w1.add((None, field.column, field, 'exact',
-ContentType.objects.get_for_model(cls).id), AND)
+w1.add((None, field.column, field.db_type(), 'exact', True,
+[ContentType.objects.get_for_model(cls).id]), AND)
 for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
 where = self.where_class()
-where.add((None, f.m2m_column_name(), f, 'in',
+where.add((None, f.m2m_column_name(), f.db_type(), 'in', True,
 pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
 AND)
 if w1:
@@ -81,7 +81,7 @@ class DeleteQuery(Query):
 for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
 where = self.where_class()
 field = self.model._meta.pk
-where.add((None, field.column, field, 'in',
+where.add((None, field.column, field.db_type(), 'in', True,
 pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]), AND)
 self.do_query(self.model._meta.db_table, where)

@@ -204,7 +204,7 @@ class UpdateQuery(Query):
 for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
 self.where = self.where_class()
 f = self.model._meta.pk
-self.where.add((None, f.column, f, 'in',
+self.where.add((None, f.column, f.db_type(), 'in', True,
 pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]),
 AND)
 self.values = [(related_field.column, None, '%s')]
@@ -21,8 +21,9 @@ class WhereNode(tree.Node):
 the correct SQL).

 The children in this tree are usually either Q-like objects or lists of
-[table_alias, field_name, field_class, lookup_type, value]. However, a
-child could also be any class with as_sql() and relabel_aliases() methods.
+[table_alias, field_name, db_type, lookup_type, value_annotation,
+params]. However, a child could also be any class with as_sql() and
+relabel_aliases() methods.
 """
 default = AND

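For illustration only, a minimal sketch of the new child-tuple shape described in the docstring above. The alias (None), column name, lookup and params here are invented for the example, and the import paths assume this revision's module layout; this snippet is not part of the changeset:

    from django.db.models.fields import IntegerField
    from django.db.models.sql.where import WhereNode, AND

    field = IntegerField()
    where = WhereNode()
    # (table_alias, field_name, db_type, lookup_type, value_annotation, params)
    where.add((None, 'id', field.db_type(), 'in', True, [1, 2, 3]), AND)

This mirrors the where.add() calls updated in the subqueries hunks below, where the field object is replaced by its db_type() and a pre-computed params list.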
@@ -88,29 +89,24 @@ class WhereNode(tree.Node):

 def make_atom(self, child, qn):
 """
-Turn a tuple (table_alias, field_name, field_class, lookup_type, value)
-into valid SQL.
+Turn a tuple (table_alias, field_name, db_type, lookup_type,
+value_annot, params) into valid SQL.

 Returns the string for the SQL fragment and the parameters to use for
 it.
 """
-table_alias, name, field, lookup_type, value = child
+table_alias, name, db_type, lookup_type, value_annot, params = child
 if table_alias:
 lhs = '%s.%s' % (qn(table_alias), qn(name))
 else:
 lhs = qn(name)
-db_type = field and field.db_type() or None
 field_sql = connection.ops.field_cast_sql(db_type) % lhs

-if isinstance(value, datetime.datetime):
+if value_annot is datetime.datetime:
 cast_sql = connection.ops.datetime_cast_sql()
 else:
 cast_sql = '%s'

-if field:
-params = field.get_db_prep_lookup(lookup_type, value)
-else:
-params = Field().get_db_prep_lookup(lookup_type, value)
 if isinstance(params, QueryWrapper):
 extra, params = params.data
 else:
@@ -123,11 +119,11 @@ class WhereNode(tree.Node):
 connection.operators[lookup_type] % cast_sql), params)

 if lookup_type == 'in':
-if not value:
+if not value_annot:
 raise EmptyResultSet
 if extra:
 return ('%s IN %s' % (field_sql, extra), params)
-return ('%s IN (%s)' % (field_sql, ', '.join(['%s'] * len(value))),
+return ('%s IN (%s)' % (field_sql, ', '.join(['%s'] * len(params))),
 params)
 elif lookup_type in ('range', 'year'):
 return ('%s BETWEEN %%s and %%s' % field_sql, params)
@@ -135,8 +131,8 @@ class WhereNode(tree.Node):
 return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type,
 field_sql), params)
 elif lookup_type == 'isnull':
-return ('%s IS %sNULL' % (field_sql, (not value and 'NOT ' or '')),
-params)
+return ('%s IS %sNULL' % (field_sql,
+(not value_annot and 'NOT ' or '')), ())
 elif lookup_type == 'search':
 return (connection.ops.fulltext_search_sql(field_sql), params)
 elif lookup_type in ('regex', 'iregex'):
@@ -196,7 +196,10 @@ def commit_on_success(func):
 managed(True)
 try:
 res = func(*args, **kw)
-except Exception, e:
+except (Exception, KeyboardInterrupt, SystemExit):
+# (We handle KeyboardInterrupt and SystemExit specially, since
+# they don't inherit from Exception in Python 2.5, but we
+# should treat them uniformly here.)
 if is_dirty():
 rollback()
 raise
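A minimal usage sketch of the decorator whose exception handling is widened above; the update_scores function and its argument are illustrative only, not part of this changeset:

    from django.db import transaction

    @transaction.commit_on_success
    def update_scores(score_objects):
        # Every save() below commits as one transaction; any exception
        # (now including KeyboardInterrupt/SystemExit) rolls the whole
        # batch back before the exception is re-raised.
        for score in score_objects:
            score.points += 1
            score.save()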
@@ -19,14 +19,14 @@ class ConditionalGetMiddleware(object):
 # Setting the status is enough here. The response handling path
 # automatically removes content for this status code (in
 # http.conditional_content_removal()).
-response.status = 304
+response.status_code = 304

 if response.has_header('Last-Modified'):
 if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE', None)
 if if_modified_since == response['Last-Modified']:
 # Setting the status code is enough here (same reasons as
 # above).
-response.status = 304
+response.status_code = 304

 return response

@@ -535,13 +535,17 @@ class BooleanField(Field):

 def clean(self, value):
 """Returns a Python boolean object."""
-super(BooleanField, self).clean(value)
 # Explicitly check for the string 'False', which is what a hidden field
 # will submit for False. Because bool("True") == True, we don't need to
 # handle that explicitly.
 if value == 'False':
-return False
-return bool(value)
+value = False
+else:
+value = bool(value)
+super(BooleanField, self).clean(value)
+if not value and self.required:
+raise ValidationError(self.error_messages['required'])
+return value

 class NullBooleanField(BooleanField):
 """
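The practical effect of the clean() rewrite above, sketched with the newforms import path in use at this point in the branch; the interactive values are indicative, not copied from a test run:

    from django import newforms as forms

    f = forms.BooleanField()    # required=True by default
    f.clean(True)               # -> True
    f.clean(False)              # now raises ValidationError ('This field is required.')

    g = forms.BooleanField(required=False)
    g.clean(False)              # -> False

The corresponding doctest updates appear further down in this changeset.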
@@ -4,10 +4,12 @@ from urlparse import urlsplit, urlunsplit

 from django.http import QueryDict
 from django.db import transaction
+from django.conf import settings
 from django.core import mail
 from django.core.management import call_command
 from django.test import _doctest as doctest
 from django.test.client import Client
+from django.core.urlresolvers import clear_url_caches

 normalize_long_ints = lambda s: re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)

@@ -54,6 +56,8 @@ class TestCase(unittest.TestCase):
 * Flushing the database.
 * If the Test Case class has a 'fixtures' member, installing the
 named fixtures.
+* If the Test Case class has a 'urls' member, replace the
+ROOT_URLCONF with it.
 * Clearing the mail test outbox.
 """
 call_command('flush', verbosity=0, interactive=False)
@@ -61,6 +65,10 @@ class TestCase(unittest.TestCase):
 # We have to use this slightly awkward syntax due to the fact
 # that we're using *args and **kwargs together.
 call_command('loaddata', *self.fixtures, **{'verbosity': 0})
+if hasattr(self, 'urls'):
+self._old_root_urlconf = settings.ROOT_URLCONF
+settings.ROOT_URLCONF = self.urls
+clear_url_caches()
 mail.outbox = []

 def __call__(self, result=None):
@@ -79,6 +87,23 @@ class TestCase(unittest.TestCase):
 result.addError(self, sys.exc_info())
 return
 super(TestCase, self).__call__(result)
+try:
+self._post_teardown()
+except (KeyboardInterrupt, SystemExit):
+raise
+except Exception:
+import sys
+result.addError(self, sys.exc_info())
+return

+def _post_teardown(self):
+""" Performs any post-test things. This includes:
+
+* Putting back the original ROOT_URLCONF if it was changed.
+"""
+if hasattr(self, '_old_root_urlconf'):
+settings.ROOT_URLCONF = self._old_root_urlconf
+clear_url_caches()

 def assertRedirects(self, response, expected_url, status_code=302,
 target_status_code=200, host=None):
@@ -2,7 +2,8 @@ import os
 import sys

 if os.name == 'posix':
-def become_daemon(our_home_dir='.', out_log='/dev/null', err_log='/dev/null'):
+def become_daemon(our_home_dir='.', out_log='/dev/null',
+err_log='/dev/null', umask=022):
 "Robustly turn into a UNIX daemon, running in our_home_dir."
 # First fork
 try:
@@ -13,7 +14,7 @@ if os.name == 'posix':
 sys.exit(1)
 os.setsid()
 os.chdir(our_home_dir)
-os.umask(0)
+os.umask(umask)

 # Second fork
 try:
@@ -32,13 +33,13 @@ if os.name == 'posix':
 # Set custom file descriptors so that they get proper buffering.
 sys.stdout, sys.stderr = so, se
 else:
-def become_daemon(our_home_dir='.', out_log=None, err_log=None):
+def become_daemon(our_home_dir='.', out_log=None, err_log=None, umask=022):
 """
 If we're not running under a POSIX system, just simulate the daemon
 mode by doing redirections and directory changing.
 """
 os.chdir(our_home_dir)
-os.umask(0)
+os.umask(umask)
 sys.stdin.close()
 sys.stdout.close()
 sys.stderr.close()
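A small usage sketch for the new umask argument added above; the module path is django.utils.daemonize and the directory and log locations are placeholders:

    from django.utils.daemonize import become_daemon

    # Detach from the terminal, overriding the 022 default umask
    # introduced by this change with a stricter value.
    become_daemon(our_home_dir='/var/run/myapp',
                  out_log='/var/log/myapp.out',
                  err_log='/var/log/myapp.err',
                  umask=027)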
@@ -39,9 +39,10 @@ with the standard ``Auth*`` and ``Require`` directives::
 example at the bottom of this note).

 You'll also need to insert configuration directives that prevent Apache
-from trying to use other authentication modules. Depending on which other
-authentication modules you have loaded, you might need one or more of
-the following directives::
+from trying to use other authentication modules, as well as specifying
+the ``AuthUserFile`` directive and pointing it to ``/dev/null``. Depending
+on which other authentication modules you have loaded, you might need one
+or more of the following directives::

 AuthBasicAuthoritative Off
 AuthDefaultAuthoritative Off
@@ -65,6 +66,7 @@ with the standard ``Auth*`` and ``Require`` directives::
 <Location /example/>
 AuthType Basic
 AuthName "example.com"
+**AuthUserFile /dev/null**
 **AuthBasicAuthoritative Off**
 Require valid-user

|
@ -443,6 +443,31 @@ This is roughly equivalent to::
|
|||||||
Note, however, that the first of these will raise ``IndexError`` while the
|
Note, however, that the first of these will raise ``IndexError`` while the
|
||||||
second will raise ``DoesNotExist`` if no objects match the given criteria.
|
second will raise ``DoesNotExist`` if no objects match the given criteria.
|
||||||
|
|
||||||
|
Combining QuerySets
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
If you have two ``QuerySet`` instances that act on the same model, you can
|
||||||
|
combine them using ``&`` and ``|`` to get the items that are in both result
|
||||||
|
sets or in either results set, respectively. For example::
|
||||||
|
|
||||||
|
Entry.objects.filter(pubdate__gte=date1) & \
|
||||||
|
Entry.objects.filter(headline__startswith="What")
|
||||||
|
|
||||||
|
will combine the two queries into a single SQL query. Of course, in this case
|
||||||
|
you could have achieved the same result using multiple filters on the same
|
||||||
|
``QuerySet``, but sometimes the ability to combine individual ``QuerySet``
|
||||||
|
instance is useful.
|
||||||
|
|
||||||
|
Be careful, if you are using ``extra()`` to add custom handling to your
|
||||||
|
``QuerySet`` however. All the ``extra()`` components are merged and the result
|
||||||
|
may or may not make sense. If you are using custom SQL fragments in your
|
||||||
|
``extra()`` calls, Django will not inspect these fragments to see if they need
|
||||||
|
to be rewritten because of changes in the merged query. So test the effects
|
||||||
|
carefully. Also realise that if you are combining two ``QuerySets`` with
|
||||||
|
``|``, you cannot use ``extra(select=...)`` or ``extra(where=...)`` on *both*
|
||||||
|
``QuerySets``. You can only use those calls on one or the other (Django will
|
||||||
|
raise a ``ValueError`` if you try to use this incorrectly).
|
||||||
|
|
||||||
QuerySet methods that return new QuerySets
|
QuerySet methods that return new QuerySets
|
||||||
------------------------------------------
|
------------------------------------------
|
||||||
|
|
||||||
|
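For the ``|`` case mentioned in the new documentation above, a short sketch using the same example model and field names from that passage (date1 is assumed to be defined as in the surrounding docs):

    recent = Entry.objects.filter(pubdate__gte=date1)
    what = Entry.objects.filter(headline__startswith="What")

    # Items appearing in either queryset; still evaluated as one SQL query.
    either = recent | what
    # Items appearing in both querysets.
    both = recent & what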
@@ -14,9 +14,14 @@ custom Django application.
 A flatpage can use a custom template or a default, systemwide flatpage
 template. It can be associated with one, or multiple, sites.

+**New in Django development version**
+
+The content field may optionally be left blank if you prefer to put your
+content in a custom template.
+
 Here are some examples of flatpages on Django-powered sites:

-* http://www.chicagocrime.org/about/
+* http://www.everyblock.com/about/
 * http://www.lawrence.com/about/contact/

 Installation
@@ -1677,7 +1677,7 @@ still only creating one database table per child model at the database level.

 When an abstract base class is created, Django makes any ``Meta`` inner class
 you declared on the base class available as an attribute. If a child class
-does not declared its own ``Meta`` class, it will inherit the parent's
+does not declare its own ``Meta`` class, it will inherit the parent's
 ``Meta``. If the child wants to extend the parent's ``Meta`` class, it can
 subclass it. For example::

@@ -797,6 +797,37 @@ another test, or by the order of test execution.
 .. _dumpdata documentation: ../django-admin/#dumpdata-appname-appname
 .. _loaddata documentation: ../django-admin/#loaddata-fixture-fixture

+URLconf configuration
+~~~~~~~~~~~~~~~~~~~~~
+
+**New in Django development version**
+
+If your application provides views, you may want to include tests that
+use the test client to exercise those views. However, an end user is free
+to deploy the views in your application at any URL of their choosing.
+This means that your tests can't rely upon the fact that your views will
+be available at a particular URL.
+
+In order to provide a reliable URL space for your test,
+``django.test.TestCase`` provides the ability to customize the URLconf
+configuration for the duration of the execution of a test suite.
+If your ``TestCase`` instance defines an ``urls`` attribute, the
+``TestCase`` will use the value of that attribute as the ``ROOT_URLCONF``
+for the duration of that test.
+
+For example::
+
+from django.test import TestCase
+
+class TestMyViews(TestCase):
+urls = 'myapp.test_urls'
+
+def testIndexPageView(self):
+# Here you'd test your view using ``Client``.
+
+This test case will use the contents of ``myapp.test_urls`` as the
+URLconf for the duration of the test case.

 Emptying the test outbox
 ~~~~~~~~~~~~~~~~~~~~~~~~

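A sketch of what a test-only URLconf such as ``myapp.test_urls`` might contain at this point in Django's history; the view name is hypothetical and not part of this changeset:

    # myapp/test_urls.py
    from django.conf.urls.defaults import *
    from myapp import views

    urlpatterns = patterns('',
        (r'^$', views.index),
    )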
 0	tests/regressiontests/extra_regress/__init__.py	Normal file
 55	tests/regressiontests/extra_regress/models.py	Normal file
@@ -0,0 +1,55 @@
+import copy
+
+from django.db import models
+from django.db.models.query import Q
+
+
+class RevisionableModel(models.Model):
+base = models.ForeignKey('self', null=True)
+title = models.CharField(blank=True, max_length=255)
+
+def __unicode__(self):
+return u"%s (%s, %s)" % (self.title, self.id, self.base.id)
+
+def save(self):
+super(RevisionableModel, self).save()
+if not self.base:
+self.base = self
+super(RevisionableModel, self).save()
+
+def new_revision(self):
+new_revision = copy.copy(self)
+new_revision.pk = None
+return new_revision
+
+__test__ = {"API_TESTS": """
+### Regression tests for #7314 and #7372
+
+>>> rm = RevisionableModel.objects.create(title='First Revision')
+>>> rm.pk, rm.base.pk
+(1, 1)
+
+>>> rm2 = rm.new_revision()
+>>> rm2.title = "Second Revision"
+>>> rm2.save()
+>>> print u"%s of %s" % (rm2.title, rm2.base.title)
+Second Revision of First Revision
+
+>>> rm2.pk, rm2.base.pk
+(2, 1)
+
+Queryset to match most recent revision:
+>>> qs = RevisionableModel.objects.extra(where=["%(table)s.id IN (SELECT MAX(rev.id) FROM %(table)s AS rev GROUP BY rev.base_id)" % {'table': RevisionableModel._meta.db_table,}],)
+>>> qs
+[<RevisionableModel: Second Revision (2, 1)>]
+
+Queryset to search for string in title:
+>>> qs2 = RevisionableModel.objects.filter(title__contains="Revision")
+>>> qs2
+[<RevisionableModel: First Revision (1, 1)>, <RevisionableModel: Second Revision (2, 1)>]
+
+Following queryset should return the most recent revision:
+>>> qs & qs2
+[<RevisionableModel: Second Revision (2, 1)>]
+
+"""}
@@ -0,0 +1,83 @@
+[
+{
+"pk": 6,
+"model": "fixtures_regress.channel",
+"fields": {
+"name": "Business"
+}
+},
+
+{
+"pk": 1,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 1",
+"channels": [6]
+}
+},
+{
+"pk": 2,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 2",
+"channels": [6]
+}
+},
+{
+"pk": 3,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 3",
+"channels": [6]
+}
+},
+{
+"pk": 4,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 4",
+"channels": [6]
+}
+},
+
+{
+"pk": 5,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 5",
+"channels": [6]
+}
+},
+{
+"pk": 6,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 6",
+"channels": [6]
+}
+},
+{
+"pk": 7,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 7",
+"channels": [6]
+}
+},
+{
+"pk": 8,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Article Title 8",
+"channels": [6]
+}
+},
+{
+"pk": 9,
+"model": "fixtures_regress.article",
+"fields": {
+"title": "Yet Another Article",
+"channels": [6]
+}
+}
+]
@@ -0,0 +1,4 @@
+[
+{"pk": 1, "model": "fixtures_regress.parent", "fields": {"name": "fred"}},
+{"pk": 1, "model": "fixtures_regress.child", "fields": {"data": "apple"}}
+]
@@ -38,6 +38,22 @@ class Absolute(models.Model):
 super(Absolute, self).__init__(*args, **kwargs)
 Absolute.load_count += 1

+class Parent(models.Model):
+name = models.CharField(max_length=10)
+
+class Child(Parent):
+data = models.CharField(max_length=10)
+
+# Models to regresison check #7572
+class Channel(models.Model):
+name = models.CharField(max_length=255)
+
+class Article(models.Model):
+title = models.CharField(max_length=255)
+channels = models.ManyToManyField(Channel)
+
+class Meta:
+ordering = ('id',)
+
 __test__ = {'API_TESTS':"""
 >>> from django.core import management
@@ -94,4 +110,28 @@ No fixture data found for 'bad_fixture2'. (File format may be invalid.)

 >>> sys.stderr = savestderr

+###############################################
+# Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't
+# ascend to parent models when inheritance is used (since they are treated
+# individually).
+
+>>> management.call_command('loaddata', 'model-inheritance.json', verbosity=0)
+
+###############################################
+# Test for ticket #7572 -- MySQL has a problem if the same connection is
+# used to create tables, load data, and then query over that data.
+# To compensate, we close the connection after running loaddata.
+# This ensures that a new connection is opened when test queries are issued.
+
+>>> management.call_command('loaddata', 'big-fixture.json', verbosity=0)
+
+>>> articles = Article.objects.exclude(id=9)
+>>> articles.values_list('id', flat=True)
+[1, 2, 3, 4, 5, 6, 7, 8]
+
+# Just for good measure, run the same query again. Under the influence of
+# ticket #7572, this will give a different result to the previous call.
+>>> articles.values_list('id', flat=True)
+[1, 2, 3, 4, 5, 6, 7, 8]
+
 """}
@@ -237,7 +237,7 @@ ValidationError: [u'REQUIRED']
 Traceback (most recent call last):
 ...
 ValidationError: [u'INVALID']
->>> f.clean('http://www.jfoiwjfoi23jfoijoaijfoiwjofiwjefewl.com')
+>>> f.clean('http://www.broken.djangoproject.com')
 Traceback (most recent call last):
 ...
 ValidationError: [u'INVALID LINK']
@@ -887,7 +887,7 @@ u'http://www.google.com'
 Traceback (most recent call last):
 ...
 ValidationError: [u'Enter a valid URL.']
->>> f.clean('http://www.jfoiwjfoi23jfoijoaijfoiwjofiwjefewl.com') # bad domain
+>>> f.clean('http://www.broken.djangoproject.com') # bad domain
 Traceback (most recent call last):
 ...
 ValidationError: [u'This URL appears to be a broken link.']
@@ -937,18 +937,24 @@ ValidationError: [u'This field is required.']
 >>> f.clean(True)
 True
 >>> f.clean(False)
-False
+Traceback (most recent call last):
+...
+ValidationError: [u'This field is required.']
 >>> f.clean(1)
 True
 >>> f.clean(0)
-False
+Traceback (most recent call last):
+...
+ValidationError: [u'This field is required.']
 >>> f.clean('Django rocks')
 True

 >>> f.clean('True')
 True
 >>> f.clean('False')
-False
+Traceback (most recent call last):
+...
+ValidationError: [u'This field is required.']

 >>> f = BooleanField(required=False)
 >>> f.clean('')
@@ -28,6 +28,24 @@ class Child(models.Model):
 parent = models.ForeignKey(Parent)


+# Multiple paths to the same model (#7110, #7125)
+class Category(models.Model):
+name = models.CharField(max_length=20)
+
+def __unicode__(self):
+return self.name
+
+class Record(models.Model):
+category = models.ForeignKey(Category)
+
+class Relation(models.Model):
+left = models.ForeignKey(Record, related_name='left_set')
+right = models.ForeignKey(Record, related_name='right_set')
+
+def __unicode__(self):
+return u"%s - %s" % (self.left.category.name, self.right.category.name)
+
+
 __test__ = {'API_TESTS':"""
 >>> Third.objects.create(id='3', name='An example')
 <Third: Third object>
@@ -73,4 +91,26 @@ Traceback (most recent call last):
 ...
 ValueError: Cannot assign "<First: First object>": "Child.parent" must be a "Parent" instance.

+# Test of multiple ForeignKeys to the same model (bug #7125)
+
+>>> c1 = Category.objects.create(name='First')
+>>> c2 = Category.objects.create(name='Second')
+>>> c3 = Category.objects.create(name='Third')
+>>> r1 = Record.objects.create(category=c1)
+>>> r2 = Record.objects.create(category=c1)
+>>> r3 = Record.objects.create(category=c2)
+>>> r4 = Record.objects.create(category=c2)
+>>> r5 = Record.objects.create(category=c3)
+>>> r = Relation.objects.create(left=r1, right=r2)
+>>> r = Relation.objects.create(left=r3, right=r4)
+>>> r = Relation.objects.create(left=r1, right=r3)
+>>> r = Relation.objects.create(left=r5, right=r2)
+>>> r = Relation.objects.create(left=r3, right=r2)
+
+>>> Relation.objects.filter(left__category__name__in=['First'], right__category__name__in=['Second'])
+[<Relation: First - Second>]
+
+>>> Category.objects.filter(record__left_set__right__category__name='Second').order_by('name')
+[<Category: First>, <Category: Second>]
+
 """}
@@ -15,4 +15,21 @@ Decimal("3.14")
 Traceback (most recent call last):
 ...
 ValidationError: [u'This value must be a decimal number.']

+>>> f = DecimalField(max_digits=5, decimal_places=1)
+>>> x = f.to_python(2)
+>>> y = f.to_python('2.6')
+
+>>> f.get_db_prep_save(x)
+u'2.0'
+>>> f.get_db_prep_save(y)
+u'2.6'
+>>> f.get_db_prep_save(None)
+>>> f.get_db_prep_lookup('exact', x)
+[u'2.0']
+>>> f.get_db_prep_lookup('exact', y)
+[u'2.6']
+>>> f.get_db_prep_lookup('exact', None)
+[None]
+
 """
@@ -131,4 +131,26 @@ __test__ = {'API_TESTS':"""
 >>> Child.objects.dates('created', 'month')
 [datetime.datetime(2008, 6, 1, 0, 0)]

+# Regression test for #7276: calling delete() on a model with multi-table
+# inheritance should delete the associated rows from any ancestor tables, as
+# well as any descendent objects.
+
+>>> ident = ItalianRestaurant.objects.all()[0].id
+>>> Place.objects.get(pk=ident)
+<Place: Guido's All New House of Pasta the place>
+>>> xx = Restaurant.objects.create(name='a', address='xx', serves_hot_dogs=True, serves_pizza=False)
+
+# This should delete both Restuarants, plus the related places, plus the ItalianRestaurant.
+>>> Restaurant.objects.all().delete()
+
+>>> Place.objects.get(pk=ident)
+Traceback (most recent call last):
+...
+DoesNotExist: Place matching query does not exist.
+
+>>> ItalianRestaurant.objects.get(pk=ident)
+Traceback (most recent call last):
+...
+DoesNotExist: ItalianRestaurant matching query does not exist.
+
 """}
@@ -0,0 +1,47 @@
+"""
+Regression tests for the interaction between model inheritance and
+select_related().
+"""
+
+from django.db import models
+
+class Place(models.Model):
+name = models.CharField(max_length=50)
+
+class Meta:
+ordering = ('name',)
+
+def __unicode__(self):
+return u"%s the place" % self.name
+
+class Restaurant(Place):
+serves_sushi = models.BooleanField()
+serves_steak = models.BooleanField()
+
+def __unicode__(self):
+return u"%s the restaurant" % self.name
+
+class Person(models.Model):
+name = models.CharField(max_length=50)
+favorite_restaurant = models.ForeignKey(Restaurant)
+
+def __unicode__(self):
+return self.name
+
+__test__ = {'API_TESTS':"""
+Regression test for #7246
+
+>>> r1 = Restaurant.objects.create(name="Nobu", serves_sushi=True, serves_steak=False)
+>>> r2 = Restaurant.objects.create(name="Craft", serves_sushi=False, serves_steak=True)
+>>> p1 = Person.objects.create(name="John", favorite_restaurant=r1)
+>>> p2 = Person.objects.create(name="Jane", favorite_restaurant=r2)
+
+>>> Person.objects.order_by('name').select_related()
+[<Person: Jane>, <Person: John>]
+
+>>> jane = Person.objects.order_by('name').select_related('favorite_restaurant')[0]
+>>> jane.favorite_restaurant.name
+u'Craft'
+
+"""}
@@ -3,13 +3,15 @@ Various complex queries that have been problematic in the past.
 """

 import datetime
+import pickle

 from django.db import models
 from django.db.models.query import Q

 class Tag(models.Model):
 name = models.CharField(max_length=10)
-parent = models.ForeignKey('self', blank=True, null=True)
+parent = models.ForeignKey('self', blank=True, null=True,
+related_name='children')

 def __unicode__(self):
 return self.name
@@ -24,6 +26,14 @@ class Note(models.Model):
 def __unicode__(self):
 return self.note

+class Annotation(models.Model):
+name = models.CharField(max_length=10)
+tag = models.ForeignKey(Tag)
+notes = models.ManyToManyField(Note)
+
+def __unicode__(self):
+return self.name
+
 class ExtraInfo(models.Model):
 info = models.CharField(max_length=100)
 note = models.ForeignKey(Note)
@@ -162,85 +172,67 @@ class Child(models.Model):
 person = models.OneToOneField(Member, primary_key=True)
 parent = models.ForeignKey(Member, related_name="children")

+# Custom primary keys interfered with ordering in the past.
+class CustomPk(models.Model):
+name = models.CharField(max_length=10, primary_key=True)
+extra = models.CharField(max_length=10)
+
+class Meta:
+ordering = ['name', 'extra']
+
+class Related(models.Model):
+custom = models.ForeignKey(CustomPk)
+
+
 __test__ = {'API_TESTS':"""
->>> t1 = Tag(name='t1')
->>> t1.save()
->>> t2 = Tag(name='t2', parent=t1)
->>> t2.save()
->>> t3 = Tag(name='t3', parent=t1)
->>> t3.save()
->>> t4 = Tag(name='t4', parent=t3)
->>> t4.save()
->>> t5 = Tag(name='t5', parent=t3)
->>> t5.save()
+>>> t1 = Tag.objects.create(name='t1')
+>>> t2 = Tag.objects.create(name='t2', parent=t1)
+>>> t3 = Tag.objects.create(name='t3', parent=t1)
+>>> t4 = Tag.objects.create(name='t4', parent=t3)
+>>> t5 = Tag.objects.create(name='t5', parent=t3)

->>> n1 = Note(note='n1', misc='foo')
->>> n1.save()
->>> n2 = Note(note='n2', misc='bar')
->>> n2.save()
->>> n3 = Note(note='n3', misc='foo')
->>> n3.save()
+>>> n1 = Note.objects.create(note='n1', misc='foo')
+>>> n2 = Note.objects.create(note='n2', misc='bar')
+>>> n3 = Note.objects.create(note='n3', misc='foo')

 Create these out of order so that sorting by 'id' will be different to sorting
 by 'info'. Helps detect some problems later.
->>> e2 = ExtraInfo(info='e2', note=n2)
->>> e2.save()
->>> e1 = ExtraInfo(info='e1', note=n1)
->>> e1.save()
+>>> e2 = ExtraInfo.objects.create(info='e2', note=n2)
+>>> e1 = ExtraInfo.objects.create(info='e1', note=n1)

->>> a1 = Author(name='a1', num=1001, extra=e1)
->>> a1.save()
->>> a2 = Author(name='a2', num=2002, extra=e1)
->>> a2.save()
->>> a3 = Author(name='a3', num=3003, extra=e2)
->>> a3.save()
->>> a4 = Author(name='a4', num=4004, extra=e2)
->>> a4.save()
+>>> a1 = Author.objects.create(name='a1', num=1001, extra=e1)
+>>> a2 = Author.objects.create(name='a2', num=2002, extra=e1)
+>>> a3 = Author.objects.create(name='a3', num=3003, extra=e2)
+>>> a4 = Author.objects.create(name='a4', num=4004, extra=e2)

 >>> time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
 >>> time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
 >>> time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
 >>> time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
->>> i1 = Item(name='one', created=time1, modified=time1, creator=a1, note=n3)
->>> i1.save()
+>>> i1 = Item.objects.create(name='one', created=time1, modified=time1, creator=a1, note=n3)
 >>> i1.tags = [t1, t2]
->>> i2 = Item(name='two', created=time2, creator=a2, note=n2)
->>> i2.save()
+>>> i2 = Item.objects.create(name='two', created=time2, creator=a2, note=n2)
 >>> i2.tags = [t1, t3]
->>> i3 = Item(name='three', created=time3, creator=a2, note=n3)
->>> i3.save()
->>> i4 = Item(name='four', created=time4, creator=a4, note=n3)
->>> i4.save()
+>>> i3 = Item.objects.create(name='three', created=time3, creator=a2, note=n3)
+>>> i4 = Item.objects.create(name='four', created=time4, creator=a4, note=n3)
 >>> i4.tags = [t4]

->>> r1 = Report(name='r1', creator=a1)
->>> r1.save()
->>> r2 = Report(name='r2', creator=a3)
->>> r2.save()
->>> r3 = Report(name='r3')
->>> r3.save()
+>>> r1 = Report.objects.create(name='r1', creator=a1)
+>>> r2 = Report.objects.create(name='r2', creator=a3)
+>>> r3 = Report.objects.create(name='r3')

 Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
 will be rank3, rank2, rank1.
->>> rank1 = Ranking(rank=2, author=a2)
->>> rank1.save()
->>> rank2 = Ranking(rank=1, author=a3)
->>> rank2.save()
->>> rank3 = Ranking(rank=3, author=a1)
->>> rank3.save()
+>>> rank1 = Ranking.objects.create(rank=2, author=a2)
+>>> rank2 = Ranking.objects.create(rank=1, author=a3)
+>>> rank3 = Ranking.objects.create(rank=3, author=a1)

->>> c1 = Cover(title="first", item=i4)
->>> c1.save()
->>> c2 = Cover(title="second", item=i2)
->>> c2.save()
+>>> c1 = Cover.objects.create(title="first", item=i4)
+>>> c2 = Cover.objects.create(title="second", item=i2)

->>> n1 = Number(num=4)
->>> n1.save()
->>> n2 = Number(num=8)
->>> n2.save()
->>> n3 = Number(num=12)
->>> n3.save()
+>>> num1 = Number.objects.create(num=4)
+>>> num2 = Number.objects.create(num=8)
+>>> num3 = Number.objects.create(num=12)

 Bug #1050
 >>> Item.objects.filter(tags__isnull=True)
@@ -346,6 +338,10 @@ Bug #1878, #2939
 4
 >>> xx.delete()

+Bug #7323
+>>> Item.objects.values('creator', 'name').count()
+4
+
 Bug #2253
 >>> q1 = Item.objects.order_by('name')
 >>> q2 = Item.objects.filter(id=i1.id)
@@ -387,6 +383,10 @@ Bug #4510
 >>> Author.objects.filter(report__name='r1')
 [<Author: a1>]

+Bug #7378
+>>> a1.report_set.all()
+[<Report: r1>]
+
 Bug #5324, #6704
 >>> Item.objects.filter(tags__name='t4')
 [<Item: four>]
@@ -791,5 +791,19 @@ Empty querysets can be merged with others.
 >>> Note.objects.all() & Note.objects.none()
 []

+Bug #7204, #7506 -- make sure querysets with related fields can be pickled. If
+this doesn't crash, it's a Good Thing.
+>>> out = pickle.dumps(Item.objects.all())
+
+Bug #7277
+>>> ann1 = Annotation.objects.create(name='a1', tag=t1)
+>>> ann1.notes.add(n1)
+>>> n1.annotation_set.filter(Q(tag=t5) | Q(tag__children=t5) | Q(tag__children__children=t5))
+[<Annotation: a1>]
+
+Bug #7371
+>>> Related.objects.order_by('custom')
+[]
+
 """}

 60	tests/regressiontests/select_related_regress/models.py	Normal file
@@ -0,0 +1,60 @@
+from django.db import models
+
+class Building(models.Model):
+    name = models.CharField(max_length=10)
+
+    def __unicode__(self):
+        return u"Building: %s" % self.name
+
+class Device(models.Model):
+    building = models.ForeignKey('Building')
+    name = models.CharField(max_length=10)
+
+    def __unicode__(self):
+        return u"device '%s' in building %s" % (self.name, self.building)
+
+class Port(models.Model):
+    device = models.ForeignKey('Device')
+    number = models.CharField(max_length=10)
+
+    def __unicode__(self):
+        return u"%s/%s" % (self.device.name, self.number)
+
+class Connection(models.Model):
+    start = models.ForeignKey(Port, related_name='connection_start',
+            unique=True)
+    end = models.ForeignKey(Port, related_name='connection_end', unique=True)
+
+    def __unicode__(self):
+        return u"%s to %s" % (self.start, self.end)
+
+__test__ = {'API_TESTS': """
+Regression test for bug #7110. When using select_related(), we must query the
+Device and Building tables using two different aliases (each) in order to
+differentiate the start and end Connection fields. The net result is that both
+the "connections = ..." queries here should give the same results.
+
+>>> b=Building.objects.create(name='101')
+>>> dev1=Device.objects.create(name="router", building=b)
+>>> dev2=Device.objects.create(name="switch", building=b)
+>>> dev3=Device.objects.create(name="server", building=b)
+>>> port1=Port.objects.create(number='4',device=dev1)
+>>> port2=Port.objects.create(number='7',device=dev2)
+>>> port3=Port.objects.create(number='1',device=dev3)
+>>> c1=Connection.objects.create(start=port1, end=port2)
+>>> c2=Connection.objects.create(start=port2, end=port3)
+
+>>> connections=Connection.objects.filter(start__device__building=b, end__device__building=b).order_by('id')
+>>> [(c.id, unicode(c.start), unicode(c.end)) for c in connections]
+[(1, u'router/4', u'switch/7'), (2, u'switch/7', u'server/1')]
+
+>>> connections=Connection.objects.filter(start__device__building=b, end__device__building=b).select_related().order_by('id')
+>>> [(c.id, unicode(c.start), unicode(c.end)) for c in connections]
+[(1, u'router/4', u'switch/7'), (2, u'switch/7', u'server/1')]
+
+# This final query should only join seven tables (port, device and building
+# twice each, plus connection once).
+>>> connections.query.count_active_tables()
+7
+
+"""}
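The docstring in this new file explains the aliasing requirement behind #7110: both start and end walk Port -> Device -> Building, so each of those tables must be joined twice under different aliases. The sketch below only illustrates that join shape; the table and alias names are simplified and not Django's generated SQL:

# Roughly:
#   SELECT ... FROM connection
#     JOIN port p1     ON connection.start_id = p1.id
#     JOIN device d1   ON p1.device_id = d1.id
#     JOIN building b1 ON d1.building_id = b1.id
#     JOIN port p2     ON connection.end_id = p2.id
#     JOIN device d2   ON p2.device_id = d2.id
#     JOIN building b2 ON d2.building_id = b2.id
# connection once plus port/device/building twice each gives the 7 reported
# by count_active_tables() in the doctest above.
qs = Connection.objects.filter(start__device__building=b,
                               end__device__building=b).select_related()
qs.query.count_active_tables()   # 7, as asserted above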
@@ -97,6 +97,12 @@ __test__ = {'API_TESTS': ur"""
 >>> Article.objects.get(text__exact='The quick brown fox jumps over the lazy dog.')
 <Article: Article Test>

+# Regression tests for #2170: test case sensitiveness
+>>> Article.objects.filter(text__exact='tHe qUick bRown fOx jUmps over tHe lazy dog.')
+[]
+>>> Article.objects.filter(text__iexact='tHe qUick bRown fOx jUmps over tHe lazy dog.')
+[<Article: Article Test>]
+
 >>> Article.objects.get(text__contains='quick brown fox')
 <Article: Article Test>

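The lines added for #2170 contrast the exact and iexact lookups. The same distinction holds for the other lookup pairs; a small sketch using the Article model from this hunk (how case-sensitive the non-i variants really are can depend on the database collation, so treat this as indicative rather than guaranteed):

Article.objects.filter(text__contains='Quick Brown Fox')    # case-sensitive on most backends: no match
Article.objects.filter(text__icontains='Quick Brown Fox')   # case-insensitive: [<Article: Article Test>]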
@@ -318,3 +318,22 @@ class ExceptionTests(TestCase):
             self.client.get("/test_client_regress/staff_only/")
         except SuspiciousOperation:
             self.fail("Staff should be able to visit this page")
+
+# We need two different tests to check URLconf subsitution - one to check
+# it was changed, and another one (without self.urls) to check it was reverted on
+# teardown. This pair of tests relies upon the alphabetical ordering of test execution.
+class UrlconfSubstitutionTests(TestCase):
+    urls = 'regressiontests.test_client_regress.urls'
+
+    def test_urlconf_was_changed(self):
+        "TestCase can enforce a custom URLConf on a per-test basis"
+        url = reverse('arg_view', args=['somename'])
+        self.assertEquals(url, '/arg_view/somename/')
+
+# This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the
+# name is to ensure alphabetical ordering.
+class zzUrlconfSubstitutionTests(TestCase):
+    def test_urlconf_was_reverted(self):
+        "URLconf is reverted to original value after modification in a TestCase"
+        url = reverse('arg_view', args=['somename'])
+        self.assertEquals(url, '/test_client_regress/arg_view/somename/')
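These new test classes rely on the TestCase urls hook: setting urls on the class swaps in that URLconf for the duration of each test and restores the project URLconf afterwards, which is exactly what the second class verifies. The referenced regressiontests.test_client_regress.urls module is not part of this hunk, so the sketch below is a hypothetical URLconf (the view name and import are assumptions) that would let reverse('arg_view', args=['somename']) resolve to '/arg_view/somename/':

from django.conf.urls.defaults import *
from regressiontests.test_client_regress import views

urlpatterns = patterns('',
    # named pattern so reverse('arg_view', args=['somename']) works
    url(r'^arg_view/(?P<name>.+)/$', views.view_with_argument, name='arg_view'),
)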